| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| string, 2–1.05M chars | string, 5–104 chars | string, 4–251 chars | stringclasses, 1 value | stringclasses, 15 values | int32, 2–1.05M |
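The rows below each pair a `code` cell with its repository metadata. As a minimal sketch of consuming a dump with this schema via the `datasets` library (the dataset path `user/python-apache-code` is hypothetical, standing in for wherever this dump is hosted):

```python
# Minimal sketch: load a dump with the schema above and filter it.
# The dataset path is hypothetical; substitute the actual location.
from datasets import load_dataset

ds = load_dataset("user/python-apache-code", split="train")  # hypothetical path

# Mirror the license and size columns: keep small Apache-2.0 samples.
small_apache = ds.filter(lambda row: row["license"] == "apache-2.0"
                         and row["size"] < 10_000)
for row in small_apache.select(range(3)):
    print(row["repo_name"], row["path"], row["size"])
```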
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remote process implementation."""
from clusterfuzz._internal.metrics import logs
from clusterfuzz._internal.protos import untrusted_runner_pb2
from clusterfuzz._internal.system import new_process
from clusterfuzz._internal.system import process_handler
from . import protobuf_utils
def process_result_to_proto(process_result):
"""Convert new_process.ProcessResult to proto."""
process_result_proto = untrusted_runner_pb2.ProcessResult(
return_code=process_result.return_code,
output=process_result.output,
time_executed=process_result.time_executed,
timed_out=process_result.timed_out)
process_result_proto.command.extend(process_result.command)
return process_result_proto
def run_and_wait(request, _):
"""Implementation of RunAndWait."""
process_runner = new_process.ProcessRunner(request.executable_path,
request.default_args)
args = {}
protobuf_utils.get_protobuf_field(args, request.popen_args, 'bufsize')
protobuf_utils.get_protobuf_field(args, request.popen_args, 'executable')
protobuf_utils.get_protobuf_field(args, request.popen_args, 'shell')
protobuf_utils.get_protobuf_field(args, request.popen_args, 'cwd')
if request.popen_args.env_is_set:
args['env'] = request.popen_args.env
else:
args['env'] = None
args['additional_args'] = request.additional_args
protobuf_utils.get_protobuf_field(args, request, 'timeout')
protobuf_utils.get_protobuf_field(args, request, 'terminate_before_kill')
protobuf_utils.get_protobuf_field(args, request, 'terminate_wait_time')
protobuf_utils.get_protobuf_field(args, request, 'input_data')
protobuf_utils.get_protobuf_field(args, request, 'max_stdout_len')
logs.log('Running command: %s' % process_runner.get_command())
return untrusted_runner_pb2.RunAndWaitResponse(
result=process_result_to_proto(process_runner.run_and_wait(**args)))
def run_process(request, _):
"""Implementation of RunProcess."""
args = {}
protobuf_utils.get_protobuf_field(args, request, 'cmdline')
protobuf_utils.get_protobuf_field(args, request, 'current_working_directory')
protobuf_utils.get_protobuf_field(args, request, 'timeout')
protobuf_utils.get_protobuf_field(args, request, 'need_shell')
if request.gestures:
args['gestures'] = request.gestures
if request.env_copy:
args['env_copy'] = request.env_copy
protobuf_utils.get_protobuf_field(args, request, 'testcase_run')
protobuf_utils.get_protobuf_field(args, request, 'ignore_children')
return_code, execution_time, output = process_handler.run_process(**args)
response = untrusted_runner_pb2.RunProcessResponse(
return_code=return_code, execution_time=execution_time, output=output)
return response
| (code above) | google/clusterfuzz | src/clusterfuzz/_internal/bot/untrusted_runner/remote_process.py | Python | apache-2.0 | 3,341 |
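`run_and_wait` above leans on `protobuf_utils.get_protobuf_field`, which is defined elsewhere in the repo. Judging only from its call sites, it appears to copy a proto field into the keyword dict when the field was explicitly set; a minimal sketch of that assumed behavior:

```python
# Sketch of the assumed behavior of protobuf_utils.get_protobuf_field,
# inferred from its call sites in run_and_wait (not necessarily the
# actual ClusterFuzz implementation).
def get_protobuf_field(args, proto, name):
    """Copy |name| from |proto| into |args| only if it was explicitly set."""
    if proto.HasField(name):  # presence check; works for proto2/optional fields
        args[name] = getattr(proto, name)
```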
import gevent
import gevent.pool
import uuid
import logging
def get_trace(greenlet=None):
greenlet = greenlet or gevent.getcurrent()
if not hasattr(greenlet, '_iris_trace'):
greenlet._iris_trace = {}
return greenlet._iris_trace
def spawn(*args, **kwargs):
greenlet = gevent.Greenlet(*args, **kwargs)
greenlet._iris_trace = get_trace().copy()
greenlet.start()
return greenlet
_spawn = spawn
class Group(gevent.pool.Group):
def spawn(self, *args, **kwargs):
g = _spawn(*args, **kwargs)
self.add(g)
return g
def trace(**kwargs):
get_trace().update(kwargs)
def set_id(trace_id=None):
trace_id = trace_id or uuid.uuid4().hex
trace(iris_trace_id=trace_id)
return trace_id
def get_id():
return get_trace().get('iris_trace_id')
class TraceFormatter(logging.Formatter):
def format(self, record):
record.trace_id = get_id()
return super(TraceFormatter, self).format(record)
| (code above) | kpanic/lymph | iris/core/trace.py | Python | apache-2.0 | 983 |
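A short usage sketch for the trace module above (the import path `iris.core.trace` is assumed from the dataset row and may differ in a real install): a trace id set in the parent greenlet is inherited by greenlets created through `spawn`, and `TraceFormatter` exposes it to log records:

```python
# Usage sketch: propagate a trace id into spawned greenlets and include
# it in log output. Import path assumed from the dataset row.
import logging

from iris.core import trace

handler = logging.StreamHandler()
handler.setFormatter(trace.TraceFormatter('%(trace_id)s %(message)s'))
logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.setLevel(logging.INFO)

def worker():
    # The child greenlet inherited a copy of the parent's trace dict,
    # so get_id() returns the id set below.
    logger.info('working, trace id: %s', trace.get_id())

trace.set_id()            # assign a fresh uuid4-based trace id to this greenlet
trace.spawn(worker).join()  # spawn() copies the current trace into the child
```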
default_app_config = 'leonardo.apps.LeonardoConfig'
__import__('pkg_resources').declare_namespace(__name__)
try:
from leonardo.base import leonardo # noqa
except ImportError:
import warnings
def simple_warn(message, category, filename, lineno, file=None, line=None):
return '%s: %s' % (category.__name__, message)
msg = ("Could not import Leonardo dependencies. "
"This is normal during installation.\n")
warnings.formatwarning = simple_warn
warnings.warn(msg, Warning)
| (code above) | amboycharlie/Child-Friendly-LCMS | leonardo/__init__.py | Python | apache-2.0 | 520 |
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from .common import BaseTest
import jmespath
class TestApacheAirflow(BaseTest):
def test_airflow_environment_value_filter(self):
session_factory = self.replay_flight_data('test_airflow_environment_value_filter')
p = self.load_policy(
{
"name": "airflow-name-filter",
"resource": "airflow",
"filters": [
{
"type": "value",
"key": "Name",
"op": "eq",
"value": "testEnvironment",
}
]
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]['Name'], 'testEnvironment')
self.assertEqual(resources[0]['c7n:MatchedFilters'], ['Name'])
def test_airflow_environment_kms_filter(self):
session_factory = self.replay_flight_data('test_airflow_environment_kms_filter')
kms = session_factory().client('kms')
expression = 'KmsKey'
p = self.load_policy(
{
"name": "airflow-kms-filter",
"resource": "airflow",
"filters": [
{
"type": "kms-key",
"key": "c7n:AliasName",
"value": "alias/mwaa",
}
]
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
aliases = kms.list_aliases(KeyId=(jmespath.search(expression, resources[0])))
self.assertEqual(aliases['Aliases'][0]['AliasName'], 'alias/mwaa')
def test_airflow_environment_tag(self):
session_factory = self.replay_flight_data('test_airflow_environment_tag')
new_tag = {'env': 'dev'}
p = self.load_policy(
{
'name': 'airflow-tag',
'resource': 'airflow',
'filters': [{
'tag:env': 'absent'
}],
'actions': [{
'type': 'tag',
'tags': new_tag
}]
},
session_factory=session_factory
)
resources = p.run()
self.assertEqual(1, len(resources))
name = resources[0].get('Name')
airflow = session_factory().client('mwaa')
call = airflow.get_environment(Name=name)
self.assertEqual(new_tag, call['Environment'].get('Tags'))
def test_airflow_environment_untag(self):
session_factory = self.replay_flight_data('test_airflow_environment_untag')
p = self.load_policy(
{
'name': 'airflow-untag',
'resource': 'airflow',
'filters': [{
'tag:env': 'dev'
}],
'actions': [{
'type': 'remove-tag',
'tags': ['env']
}]
},
session_factory=session_factory
)
resources = p.run()
self.assertEqual(1, len(resources))
name = resources[0].get('Name')
airflow = session_factory().client('mwaa')
call = airflow.get_environment(Name=name)
self.assertEqual({}, call['Environment'].get('Tags'))
| (code above) | thisisshi/cloud-custodian | tests/test_airflow.py | Python | apache-2.0 | 3,504 |
'''
Created on Aug 29, 2015
@author: kevinchien
'''
import datetime
# from bson import ObjectId
from tornado.gen import Task, Return
from tornado.gen import coroutine
from src.common.logutil import get_logger
# from src.core.mongoutil import get_instance
#
# @coroutine
# def update_auth(auth_info):
# new_auth_info = auth_info.copy()
# new_auth_info['updated_at'] = datetime.datetime.utcnow()
#
# criteria = {"user_id": new_auth_info.get('user_id'),
# "access_token": new_auth_info.get('access_token'),
# "refresh_token": new_auth_info.get('refresh_token')}
#
# fields = {'$set': new_auth_info}
#
# result, error = yield Task(get_instance().auth_info.update, criteria, fields)
# if error is not None:
# raise error
#
# raise Return(result)
| (code above) | cchienhao/data_collector | src/collectors/fitbit/dao.py | Python | apache-2.0 | 841 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Google Drive database plugin."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import gdrive as _ # pylint: disable=unused-import
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import gdrive
from tests.parsers.sqlite_plugins import test_lib
class GoogleDrivePluginTest(test_lib.SQLitePluginTestCase):
"""Tests for the Google Drive database plugin."""
def testProcess(self):
"""Tests the Process function on a Google Drive database file."""
plugin = gdrive.GoogleDrivePlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(['snapshot.db'], plugin)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 30)
# Let's verify that we've got the correct balance of cloud and local
# entry events.
# 10 files amounting to:
# 20 Cloud Entries (two timestamps per entry).
# 10 Local Entries (one timestamp per entry).
local_entries = []
cloud_entries = []
for event in storage_writer.GetEvents():
event_data = self._GetEventDataOfEvent(storage_writer, event)
if event_data.data_type == 'gdrive:snapshot:local_entry':
local_entries.append(event)
else:
cloud_entries.append(event)
self.assertEqual(len(local_entries), 10)
self.assertEqual(len(cloud_entries), 20)
# Test one local and one cloud entry.
event = local_entries[5]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:11:25.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
file_path = (
'%local_sync_root%/Top Secret/Enn meiri '
'leyndarmál/Sýnileiki - Örverpi.gdoc')
self.assertEqual(event_data.path, file_path)
expected_message = 'File Path: {0:s} Size: 184'.format(file_path)
self._TestGetMessageStrings(
event_data, expected_message, file_path)
event = cloud_entries[16]
self.CheckTimestamp(event.timestamp, '2014-01-28 00:12:27.000000')
self.assertEqual(
event.timestamp_desc, definitions.TIME_DESCRIPTION_MODIFICATION)
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.document_type, 6)
expected_url = (
'https://docs.google.com/document/d/'
'1ypXwXhQWliiMSQN9S5M0K6Wh39XF4Uz4GmY-njMf-Z0/edit?usp=docslist_api')
self.assertEqual(event_data.url, expected_url)
expected_message = (
'File Path: /Almenningur/Saklausa hliðin '
'[Private] '
'Size: 0 '
'URL: {0:s} '
'Type: DOCUMENT').format(expected_url)
expected_short_message = '/Almenningur/Saklausa hliðin'
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
if __name__ == '__main__':
unittest.main()
| (code above) | rgayon/plaso | tests/parsers/sqlite_plugins/gdrive.py | Python | apache-2.0 | 2,876 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.cloud.errorreporting_v1beta1.types import report_errors_service
from .transports.base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import ReportErrorsServiceGrpcTransport
from .transports.grpc_asyncio import ReportErrorsServiceGrpcAsyncIOTransport
class ReportErrorsServiceClientMeta(type):
"""Metaclass for the ReportErrorsService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[ReportErrorsServiceTransport]]
_transport_registry["grpc"] = ReportErrorsServiceGrpcTransport
_transport_registry["grpc_asyncio"] = ReportErrorsServiceGrpcAsyncIOTransport
def get_transport_class(
cls, label: str = None,
) -> Type[ReportErrorsServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class ReportErrorsServiceClient(metaclass=ReportErrorsServiceClientMeta):
"""An API for reporting error events."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "clouderrorreporting.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ReportErrorsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ReportErrorsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> ReportErrorsServiceTransport:
"""Returns the transport used by the client instance.
Returns:
ReportErrorsServiceTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[client_options_lib.ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise if the client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_client_cert not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
if use_mtls_endpoint not in ("auto", "never", "always"):
raise MutualTLSChannelError(
"Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
)
# Figure out the client cert source to use.
client_cert_source = None
if use_client_cert == "true":
if client_options.client_cert_source:
client_cert_source = client_options.client_cert_source
elif mtls.has_default_client_cert_source():
client_cert_source = mtls.default_client_cert_source()
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
elif use_mtls_endpoint == "always" or (
use_mtls_endpoint == "auto" and client_cert_source
):
api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = cls.DEFAULT_ENDPOINT
return api_endpoint, client_cert_source
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, ReportErrorsServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the report errors service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ReportErrorsServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
client_options
)
api_key_value = getattr(client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
"client_options.api_key and credentials are mutually exclusive"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, ReportErrorsServiceTransport):
# transport is a ReportErrorsServiceTransport instance.
if credentials or client_options.credentials_file or api_key_value:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
import google.auth._default # type: ignore
if api_key_value and hasattr(
google.auth._default, "get_api_key_credentials"
):
credentials = google.auth._default.get_api_key_credentials(
api_key_value
)
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
def report_error_event(
self,
request: Union[report_errors_service.ReportErrorEventRequest, dict] = None,
*,
project_name: str = None,
event: report_errors_service.ReportedErrorEvent = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> report_errors_service.ReportErrorEventResponse:
r"""Report an individual error event and record the event to a log.
This endpoint accepts **either** an OAuth token, **or** an `API
key <https://support.google.com/cloud/answer/6158862>`__ for
authentication. To use an API key, append it to the URL as the
value of a ``key`` parameter. For example:
``POST https://clouderrorreporting.googleapis.com/v1beta1/{projectName}/events:report?key=123ABC456``
**Note:** `Error Reporting </error-reporting>`__ is a global
service built on Cloud Logging and doesn't analyze logs stored
in regional log buckets or logs routed to other Google Cloud
projects.
For more information, see `Using Error Reporting with
regionalized logs </error-reporting/docs/regionalization>`__.
.. code-block:: python
from google.cloud import errorreporting_v1beta1
def sample_report_error_event():
# Create a client
client = errorreporting_v1beta1.ReportErrorsServiceClient()
# Initialize request argument(s)
event = errorreporting_v1beta1.ReportedErrorEvent()
event.message = "message_value"
request = errorreporting_v1beta1.ReportErrorEventRequest(
project_name="project_name_value",
event=event,
)
# Make the request
response = client.report_error_event(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.errorreporting_v1beta1.types.ReportErrorEventRequest, dict]):
The request object. A request for reporting an
individual error event.
project_name (str):
Required. The resource name of the Google Cloud Platform
project. Written as ``projects/{projectId}``, where
``{projectId}`` is the `Google Cloud Platform project
ID <https://support.google.com/cloud/answer/6158840>`__.
Example: ``projects/my-project-123``.
This corresponds to the ``project_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
event (google.cloud.errorreporting_v1beta1.types.ReportedErrorEvent):
Required. The error event to be
reported.
This corresponds to the ``event`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse:
Response for reporting an individual
error event. Data may be added to this
message in the future.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_name, event])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a report_errors_service.ReportErrorEventRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, report_errors_service.ReportErrorEventRequest):
request = report_errors_service.ReportErrorEventRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project_name is not None:
request.project_name = project_name
if event is not None:
request.event = event
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.report_error_event]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("project_name", request.project_name),)
),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-errorreporting",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("ReportErrorsServiceClient",)
| (code above) | googleapis/python-error-reporting | google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py | Python | apache-2.0 | 22,790 |
from boten.core import BaseBot
import payloads
class TestBot(BaseBot):
def command_arg_bot(self, user_name):
yield "hello {}".format(user_name)
def command_no_arg_bot(self):
yield "hello"
def command_optional_arg_bot(self, optional="default"):
yield "hello {}".format(optional)
def command_two_message_bot(self):
yield "message1"
yield "message2"
def foo(self):
pass
def test_available_commands():
bot = TestBot({})
available_commands = bot.commands
assert "arg_bot" in available_commands
assert "no_arg_bot" in available_commands
assert "optional_arg_bot" in available_commands
assert "two_message_bot" in available_commands
assert "foo" not in available_commands
def test_arg_bot_with_arg():
bot = TestBot({})
response = list(bot.run_command(payloads.arg_bot_with_arg))
assert response[0] == "hello derp"
def test_arg_bot_with_no_args():
bot = TestBot({})
response = list(bot.run_command(payloads.arg_bot_with_no_args))
assert response[0].startswith("Got TypeError") # Help message
def test_no_arg_bot_without_arg():
bot = TestBot({})
response = list(bot.run_command(payloads.no_arg_bot_without_arg))
assert response[0] == "hello"
def test_no_arg_bot_with_arg():
bot = TestBot({})
response = list(bot.run_command(payloads.no_arg_bot_with_arg))
assert response[0].startswith("Got TypeError") # Help message
def test_optional_arg_bot_with_optional_arg():
bot = TestBot({})
response = list(bot.run_command(payloads.optional_arg_bot_with_optional_arg))
assert response[0] == 'hello derp'
def test_optional_arg_bot_with_no_arg():
bot = TestBot({})
response = list(bot.run_command(payloads.optional_arg_bot_with_no_arg))
assert response[0] == 'hello default'
def test_two_message_bot():
bot = TestBot({})
response = list(bot.run_command(payloads.two_message_bot))
assert len(response) == 2
def test_help_subcommand():
bot = TestBot({})
response = list(bot.run_command(payloads.no_arg_bot_with_arg))
assert response[0].startswith("Got TypeError") # Help message
| (code above) | forter/boten | test/test_botparse.py | Python | apache-2.0 | 2,177 |
# -*- coding: utf-8 -*-
# Copyright Tom SF Haines
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import posixpath
import random
import math
from bin.shared import ray_cast
from bin.shared import csp
from direct.actor.Actor import Actor
from direct.interval.IntervalGlobal import *
from direct.interval.ActorInterval import ActorInterval
from panda3d.core import *
from panda3d.ode import *
class SimpleWeapon:
"""Provides a simple weapon system - not very sophisticaed, but good enough to test shooting things."""
def __init__(self,manager,xml):
self.gunView = render.attachNewNode('gun-view')
self.ray = None
self.reload(manager,xml)
def destroy(self):
self.gunView.removeNode()
if self.ray!=None:
self.ray.destroy()
def reload(self,manager,xml):
# Get the path to load weapons from...
basePath = manager.get('paths').getConfig().find('weapons').get('path')
# Variables to manage the firing state (Used G36 as reference for defaults.)...
bullet = xml.find('bullet')
if bullet!=None:
self.bulletRate = float(bullet.get('rate',1.0/12.5))
self.bulletSpeed = float(bullet.get('speed',920.0))
self.bulletWeight = float(bullet.get('mass',0.004))
else:
self.bulletRate = 1.0/12.5
self.bulletSpeed = 920.0
self.bulletWeight = 0.004
# Determine the weapon meshes path...
self.meshPath = posixpath.join(basePath, xml.find('egg').get('file'))
# Get the camera interface, so we can zoom in when the player aims...
self.camera = manager.get(xml.find('camera').get('plugin'))
# Create our gun node - both the gun and the ray used for shooting track this - allows for gun jitter, kick back etc...
parent = xml.find('parent')
self.gunView.reparentTo(manager.get(parent.get('plugin')).getNode(parent.get('node')))
# Create a ray cast to detect what the player is looking at... and what will be shot...
self.space = manager.get('ode').getSpace()
if self.ray!=None:
self.ray.destroy()
self.ray = OdeRayGeom(100.0)
self.ray.setCategoryBits(BitMask32(0xfffffffe))
self.ray.setCollideBits(BitMask32(0xfffffffe))
# Get all the stuff we need to do the muzzle flash particle effect...
flash = xml.find('muzzle_flash')
self.flashManager = manager.get(flash.get('plugin'))
self.flashEffect = flash.get('effect')
self.flashBone = flash.get('bone') # Will be swapped out for the actual node later.
self.flashPos = csp.getPos(flash.get('pos'))
# Get all the stuff we need to do the bullet hit sparks effect...
sparks = xml.find('sparks')
self.sparksManager = manager.get(sparks.get('plugin'))
self.sparksEffect = sparks.get('effect')
# Create a quaternion that rotates +ve z to +ve y - used to point it in the weapon direction rather than up...
self.zToY = Quat()
self.zToY.setFromAxisAngle(-90.0,Vec3(1.0,0.0,0.0))
# State for the animation...
self.state = False # False==casual, True==aim.
self.nextState = False
# Firing state...
self.firing = False # True if the trigger is being held.
self.triggerTime = 0.0 # How long the trigger has been held for, so we know when to eject ammo.
# For bullet holes
bh = xml.find('bullet_holes')
if bh != None:
self.bulletHoles = manager.get(bh.get('plugin'))
else:
self.bulletHoles = None
def postInit(self):
for i in self.postReload():
yield i
def postReload(self):
# Load the actor...
self.mesh = Actor(self.meshPath)
yield
# Shader generator makes it shiny, plus we need it in the right places in the render graph...
self.mesh.setShaderAuto()
self.mesh.reparentTo(self.gunView)
self.mesh.hide()
yield
# Set its animation going... except we pause it until needed...
self.nextAni()
self.interval.pause()
# Gun flash requires an exposed bone...
self.flashBone = self.mesh.exposeJoint(None,"modelRoot",self.flashBone)
yield
def gunControl(self,task):
# Update the gun direction ray to follow the players view...
self.ray.setPosition(self.gunView.getPos(render))
self.ray.setQuaternion(self.zToY.multiply(self.gunView.getQuat(render)))
# If the gun is firing update the trigger time, if a bullet is ejected do the maths...
if self.firing:
dt = globalClock.getDt()
self.triggerTime += dt
while self.triggerTime>self.bulletRate:
self.triggerTime -= self.bulletRate
hit,pos,norm = ray_cast.nearestHit(self.space,self.ray)
# Create a muzzle flash effect...
self.flashManager.doEffect(self.flashEffect, self.flashBone, True, self.flashPos)
if hit:
# Create an impact sparks effect...
# Calculate the reflection direction...
rd = self.ray.getDirection()
sparkDir = (norm * (2.0*norm.dot(rd))) - rd
# Convert the reflection direction into a quaternion that will rotate +ve z to the required direction...
try:
ang = -math.acos(sparkDir[2])
except:
print 'Angle problem', sparkDir
ang = 0.0
axis = Vec3(0.0,0.0,1.0).cross(sparkDir)
axis.normalize()
sparkQuat = Quat()
sparkQuat.setFromAxisAngleRad(ang,axis)
# Set it going...
self.sparksManager.doEffect(self.sparksEffect, render, False, pos, sparkQuat)
# Make a bullet hole
if hit.hasBody() and isinstance(hit.getBody().getData(), NodePath):
self.bulletHoles.makeNew(pos, norm, hit.getBody().getData())
else:
self.bulletHoles.makeNew(pos, norm, None)
# Impart some energy on the object...
if hit and hit.hasBody():
body = hit.getBody()
# Calculate the force required to supply the energy the bullet contains to the body...
force = self.bulletWeight*self.bulletSpeed/0.05
# Get the direction of travel of the bullet, multiply by force...
d = self.ray.getDirection()
d *= force
# If the object is asleep awaken it...
if not body.isEnabled():
body.enable()
# Add the force to the object...
body.addForceAtPos(d,pos)
return task.cont
def start(self):
# Make the gun visible...
self.mesh.show()
# Set the gun animation going...
self.interval.finish()
# Weapon task - this primarily makes it shoot...
self.task = taskMgr.add(self.gunControl,'GunControl')
def stop(self):
self.interval.pause()
self.mesh.hide()
taskMgr.remove(self.task)
def nextAni(self):
self.state = self.nextState
if self.state:
ani = random.choice(('aim_wiggle_a','aim_wiggle_b','aim_wiggle_c'))
else:
ani = random.choice(('casual_wiggle_a','casual_wiggle_b','casual_wiggle_c'))
self.mesh.pose(ani,0)
self.interval = Sequence(self.mesh.actorInterval(ani),Func(self.nextAni))
self.interval.start()
def setAiming(self,s):
if self.nextState!=s:
self.interval.pause()
self.nextState = s
self.camera.setZoomed(s)
def wib():
self.interval.finish()
if s: ani = 'casual_aim'
else: ani = 'aim_casual'
transition = Sequence(self.mesh.actorInterval(ani),Func(wib))
transition.start()
def setFiring(self,s):
self.firing = s
if self.firing:
self.triggerTime = 0.0
| (code above) | Panda3D-google-code-repositories/naith | game/plugins/simpleweapon/simpleweapon.py | Python | apache-2.0 | 7,962 |
__source__ = 'https://leetcode.com/problems/balanced-binary-tree/#/description'
# https://github.com/kamyu104/LeetCode/blob/master/Python/balanced-binary-tree.py
# Time: O(n)
# Space: O(h), h is height of binary tree
# divide and conquer
#
# Description: Leetcode # 110. Balanced Binary Tree
#
# Given a binary tree, determine if it is height-balanced.
#
# For this problem, a height-balanced binary tree is defined as a binary tree
# in which the depth of the two subtrees of every node never differ by more than 1.
#
# Companies
# Bloomberg
# Related Topics
# Tree Depth-first Search
# Similar Questions
# Maximum Depth of Binary Tree
#
import unittest
# Definition for a binary tree node
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
# @param root, a tree node
# @return a boolean
def isBalanced(self, root):
return (self.getHeight(root) >= 0)
def getHeight(self, root):
if root is None:
return 0
left_height = self.getHeight(root.left)
right_height = self.getHeight(root.right)
if left_height < 0 or right_height < 0 or abs(left_height - right_height) > 1:
return -1
return max(left_height, right_height) + 1
#http://www.programcreek.com/2013/02/leetcode-balanced-binary-tree-java/
class javaSolution:
# @param root, a tree node
# @return a boolean
def isBalanced(self, root):
if not root:
return None
if self.getHeight(root) == -1:
return False
return True
def getHeight(self, root):
if not root:
return 0
left = self.getHeight(root.left)
right = self.getHeight(root.right)
if left == -1 or right == -1:
return -1
if abs(left - right) > 1:
return -1
return max(left, right) + 1
class SolutionOther:
# @param root, a tree node
# @return a boolean
# http://www.cnblogs.com/zuoyuan/p/3720169.html
def isBalanced(self, root):
if root == None:
return True
if abs(self.Height(root.left) - self.Height(root.right)) <= 1:
return self.isBalanced(root.left) and self.isBalanced(root.right)
else:
return False
def Height(self, root) :
if root == None:
return 0
return max(self.Height(root.left), self.Height(root.right)) +1
#############test
#creating BST tree ####
root0=TreeNode(0)
tree1=TreeNode(1)
tree2=TreeNode(2)
tree3=TreeNode(3)
tree4=TreeNode(4)
tree5=TreeNode(5)
tree6=TreeNode(6)
root0.left=tree1
#root0.right=tree2
tree1.left=tree3
tree1.right=tree4
tree2.left=tree5
#tree2.right=tree6
#end of creating BST tree ####
#test
test = SolutionOther()
print test.isBalanced(root0)
#print test.isBalanced3(root0)
#print test.isBalanced2(root0)
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
root = TreeNode(0)
root.left = TreeNode(1)
result = Solution().isBalanced(root)
print result
root.left.left = TreeNode(2)
result = javaSolution().isBalanced(root)
print result
if __name__ == '__main__':
unittest.main()
Java = '''
#Thought: https://leetcode.com/problems/contains-duplicate/solution/
Thought: This problem is generally believed to have two solutions:
the top-down approach and the bottom-up approach.
DFS 1) The first method checks whether the tree is balanced strictly according to the definition
of a balanced binary tree: the difference between the heights of the two subtrees is not bigger than 1,
and both the left subtree and the right subtree are also balanced. With the helper function depth(),
we could easily write the code.
For the current node root, calling depth() for its left and right children actually has to access all of its children,
thus the complexity is O(N). We do this for each node in the tree,
so the overall complexity of isBalanced will be O(N^2). This is a top-down approach.
DFS 2) The second method is based on DFS. Instead of calling depth() explicitly for each child node,
we return the height of the current node in the DFS recursion.
When the subtree of the current node (inclusive) is balanced, the function dfsHeight()
returns a non-negative value as the height.
Otherwise -1 is returned. Based on the leftHeight and rightHeight of the two children,
the parent node can check if the subtree is balanced and decide its return value.
# DFS
# 87.89% 1ms
class Solution {
public boolean isBalanced(TreeNode root) {
return dfsHeight(root) != -1;
}
public int dfsHeight(TreeNode root) {
if (root == null) return 0;
int left = dfsHeight(root.left);
int right = dfsHeight(root.right);
if (left == -1 || right == -1 || Math.abs(left - right) > 1) return -1;
return Math.max(left, right) + 1;
}
}
# DFS
# 87.89% 1ms
class Solution {
public boolean isBalanced(TreeNode root) {
if (root == null) return true;
int left = getDepth(root.left);
int right = getDepth(root.right);
return Math.abs(left - right) <= 1 && isBalanced(root.left) && isBalanced(root.right);
}
public int getDepth(TreeNode root) {
if (root == null) return 0;
return Math.max(getDepth(root.left), getDepth(root.right)) + 1;
}
}
'''
| (code above) | JulyKikuAkita/PythonPrac | cs15211/BalancedBinaryTree.py | Python | apache-2.0 | 5,434 |
#!/usr/bin/env python3
#
# This file is open source software, licensed to you under the terms
# of the Apache License, Version 2.0 (the "License"). See the NOTICE file
# distributed with this work for additional information regarding copyright
# ownership. You may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import sys
import argparse
import subprocess
import signal
import re
boost_tests = [
'futures_test',
'memcached/test_ascii_parser',
'sstring_test',
'output_stream_test',
'httpd',
]
other_tests = [
'smp_test',
]
last_len = 0
def print_status_short(msg):
global last_len
print('\r' + ' '*last_len, end='')
last_len = len(msg)
print('\r' + msg, end='')
print_status_verbose = print
class Alarm(Exception):
pass
def alarm_handler(signum, frame):
raise Alarm
if __name__ == "__main__":
all_modes = ['debug', 'release']
parser = argparse.ArgumentParser(description="Seastar test runner")
parser.add_argument('--fast', action="store_true", help="Run only fast tests")
parser.add_argument('--name', action="store", help="Run only test whose name contains given string")
parser.add_argument('--mode', choices=all_modes, help="Run only tests for given build mode")
parser.add_argument('--timeout', action="store",default="300",type=int, help="timeout value for test execution")
parser.add_argument('--jenkins', action="store",help="jenkins output file prefix")
parser.add_argument('--verbose', '-v', action = 'store_true', default = False,
help = 'Verbose reporting')
args = parser.parse_args()
black_hole = open('/dev/null', 'w')
print_status = print_status_verbose if args.verbose else print_status_short
test_to_run = []
modes_to_run = all_modes if not args.mode else [args.mode]
for mode in modes_to_run:
prefix = os.path.join('build', mode, 'tests')
for test in other_tests:
test_to_run.append((os.path.join(prefix, test),'other'))
for test in boost_tests:
test_to_run.append((os.path.join(prefix, test),'boost'))
test_to_run.append(('tests/memcached/test.py --mode ' + mode + (' --fast' if args.fast else ''),'other'))
test_to_run.append((os.path.join(prefix, 'distributed_test') + ' -c 2','other'))
allocator_test_path = os.path.join(prefix, 'allocator_test')
if args.fast:
if mode == 'debug':
test_to_run.append((allocator_test_path + ' --iterations 5','other'))
else:
test_to_run.append((allocator_test_path + ' --time 0.1','other'))
else:
test_to_run.append((allocator_test_path,'other'))
if args.name:
test_to_run = [t for t in test_to_run if args.name in t[0]]
if args.jenkins:
jenkins_boost_log = open(args.jenkins+".boost.xml",'wb')
jenkins_boost_log.write(b'<TestLog><TestSuite name="all">')
all_ok = True
n_total = len(test_to_run)
env = os.environ
# disable false positive due to new (with_alignment(...)) ...
env['ASAN_OPTIONS'] = 'alloc_dealloc_mismatch=0'
for n, test in enumerate(test_to_run):
path = test[0]
prefix = '[%d/%d]' % (n + 1, n_total)
print_status('%s RUNNING %s' % (prefix, path))
signal.signal(signal.SIGALRM, alarm_handler)
if args.jenkins and test[1] == 'boost':
path = path + " --output_format=XML --log_level=all --report_level=no"
proc = subprocess.Popen(path.split(' '), stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env,preexec_fn=os.setsid)
signal.alarm(args.timeout)
out = None
err = None
try:
out, err = proc.communicate()
signal.alarm(0)
except:
os.killpg(os.getpgid(proc.pid), signal.SIGKILL)
proc.kill()
proc.returncode = -1
finally:
if proc.returncode:
print_status('FAILED: %s\n' % (path))
if proc.returncode == -1:
print_status('TIMED OUT\n')
if out:
print('=== stdout START ===')
print(out.decode())
print('=== stdout END ===')
if err:
print('=== stderr START ===')
print(err.decode())
print('=== stderr END ===')
all_ok = False
else:
print_status('%s PASSED %s' % (prefix, path))
if args.jenkins and test[1] == 'boost':
# remove the <TestLog> and </TestLog>
jenkins_boost_log.write(out[9:-10])
if args.jenkins:
jenkins_boost_log.write(b'</TestSuite></TestLog>')
if all_ok:
print('\nOK.')
else:
print_status('')
sys.exit(1)
| (code above) | ducthangho/imdb | test.py | Python | apache-2.0 | 5,282 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright (c) 2011 Piston Cloud Computing, Inc
# Copyright (c) 2012 University Of Minho
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Manages information about the guest.
This class encapsulates the libvirt domain and provides certain
higher-level APIs around the raw libvirt API. These APIs are
then used by all the other libvirt-related classes.
"""
from lxml import etree
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import importutils
from nova.i18n import _LE
from nova import utils
from nova.virt.libvirt import config as vconfig
libvirt = None
LOG = logging.getLogger(__name__)
class Guest(object):
def __init__(self, domain):
global libvirt
if libvirt is None:
libvirt = importutils.import_module('libvirt')
self._domain = domain
def __repr__(self):
return "<Guest %(id)d %(name)s %(uuid)s>" % {
'id': self.id,
'name': self.name,
'uuid': self.uuid
}
@property
def id(self):
return self._domain.ID()
@property
def uuid(self):
return self._domain.UUIDString()
@property
def name(self):
return self._domain.name()
@property
def _encoded_xml(self):
return encodeutils.safe_decode(self._domain.XMLDesc(0))
@classmethod
def create(cls, xml, host):
"""Create a new Guest
:param xml: XML definition of the domain to create
:param host: host.Host connection to define the guest on
:returns guest.Guest: Guest ready to be launched
"""
try:
# TODO(sahid): Host.write_instance_config should return
# an instance of Guest
domain = host.write_instance_config(xml)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Error defining a domain with XML: %s') %
encodeutils.safe_decode(xml))
return cls(domain)
def launch(self, pause=False):
"""Starts a created guest.
:param pause: Indicates whether to start and pause the guest
"""
flags = pause and libvirt.VIR_DOMAIN_START_PAUSED or 0
try:
return self._domain.createWithFlags(flags)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Error launching a defined domain '
'with XML: %s') %
self._encoded_xml, errors='ignore')
def poweroff(self):
"""Stops a running guest."""
self._domain.destroy()
def resume(self):
"""Resumes a suspended guest."""
self._domain.resume()
def enable_hairpin(self):
"""Enables hairpin mode for this guest."""
interfaces = self.get_interfaces()
try:
for interface in interfaces:
utils.execute(
'tee',
'/sys/class/net/%s/brport/hairpin_mode' % interface,
process_input='1',
run_as_root=True,
check_exit_code=[0, 1])
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Error enabling hairpin mode with XML: %s') %
self._encoded_xml, errors='ignore')
def get_interfaces(self):
"""Returns a list of all network interfaces for this domain."""
doc = None
try:
doc = etree.fromstring(self._encoded_xml)
except Exception:
return []
interfaces = []
nodes = doc.findall('./devices/interface/target')
for target in nodes:
interfaces.append(target.get('dev'))
return interfaces
def get_vcpus_info(self):
"""Returns virtual cpus information of guest.
:returns: guest.VCPUInfo
"""
vcpus = self._domain.vcpus()
if vcpus is not None:
for vcpu in vcpus[0]:
yield VCPUInfo(
id=vcpu[0], cpu=vcpu[3], state=vcpu[1], time=vcpu[2])
def delete_configuration(self):
"""Undefines a domain from hypervisor."""
try:
self._domain.undefineFlags(
libvirt.VIR_DOMAIN_UNDEFINE_MANAGED_SAVE)
except libvirt.libvirtError:
LOG.debug("Error from libvirt during undefineFlags. %d"
"Retrying with undefine", self.id)
self._domain.undefine()
except AttributeError:
# Older versions of libvirt don't support undefine flags,
# trying to remove managed image
try:
if self._domain.hasManagedSaveImage(0):
self._domain.managedSaveRemove(0)
except AttributeError:
pass
self._domain.undefine()
def has_persistent_configuration(self):
"""Whether domain config is persistently stored on the host."""
return self._domain.isPersistent()
def attach_device(self, conf, persistent=False, live=False):
"""Attaches device to the guest.
:param conf: A LibvirtConfigObject of the device to attach
:param persistent: A bool to indicate whether the change is
persistent or not
:param live: A bool to indicate whether it affects the guest
in running state
"""
flags = persistent and libvirt.VIR_DOMAIN_AFFECT_CONFIG or 0
flags |= live and libvirt.VIR_DOMAIN_AFFECT_LIVE or 0
self._domain.attachDeviceFlags(conf.to_xml(), flags=flags)
def get_disk(self, device):
"""Returns the disk mounted at device
:returns LibvirtConfigGuestDisk: mounted at device or None
"""
try:
doc = etree.fromstring(self._domain.XMLDesc(0))
except Exception:
return None
node = doc.find("./devices/disk/target[@dev='%s'].." % device)
if node is not None:
conf = vconfig.LibvirtConfigGuestDisk()
conf.parse_dom(node)
return conf
def detach_device(self, conf, persistent=False, live=False):
"""Detaches device to the guest.
:param conf: A LibvirtConfigObject of the device to detach
:param persistent: A bool to indicate whether the change is
persistent or not
:param live: A bool to indicate whether it affects the guest
in running state
"""
flags = persistent and libvirt.VIR_DOMAIN_AFFECT_CONFIG or 0
flags |= live and libvirt.VIR_DOMAIN_AFFECT_LIVE or 0
self._domain.detachDeviceFlags(conf.to_xml(), flags=flags)
def get_xml_desc(self, dump_inactive=False, dump_sensitive=False,
dump_migratable=False):
"""Returns xml description of guest.
:param dump_inactive: Dump inactive domain information
:param dump_sensitive: Dump security sensitive information
:param dump_migratable: Dump XML suitable for migration
:returns string: XML description of the guest
"""
flags = dump_inactive and libvirt.VIR_DOMAIN_XML_INACTIVE or 0
flags |= dump_sensitive and libvirt.VIR_DOMAIN_XML_SECURE or 0
flags |= dump_migratable and libvirt.VIR_DOMAIN_XML_MIGRATABLE or 0
return self._domain.XMLDesc(flags=flags)
def save_memory_state(self):
"""Saves the domain's memory state. Requires running domain.
raises: raises libvirtError on error
"""
self._domain.managedSave(0)
class VCPUInfo(object):
def __init__(self, id, cpu, state, time):
"""Structure for information about guest vcpus.
:param id: The virtual cpu number
:param cpu: The host cpu currently associated
:param state: The running state of the vcpu (0 offline, 1 running, 2
blocked on resource)
:param time: The cpu time used in nanoseconds
"""
self.id = id
self.cpu = cpu
self.state = state
self.time = time
| (code above) | CloudServer/nova | nova/virt/libvirt/guest.py | Python | apache-2.0 | 9,019 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cd_subscription', '0004_auto_20161125_1901'),
]
operations = [
migrations.AlterField(
model_name='cdsubscription',
name='badgelength',
field=models.IntegerField(blank=True, default=0, verbose_name='Length badge image'),
),
migrations.AlterField(
model_name='cdsubscription',
name='emailparent',
field=models.EmailField(blank=True, max_length=40, default='', verbose_name='Email parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='emailplayer',
field=models.EmailField(blank=True, max_length=40, default='', verbose_name='Email player'),
),
migrations.AlterField(
model_name='cdsubscription',
name='fullnameattendant',
field=models.CharField(blank=True, max_length=50, default='', verbose_name='Full name responsible on site'),
),
migrations.AlterField(
model_name='cdsubscription',
name='fullnameparent',
field=models.CharField(blank=True, max_length=50, default='', verbose_name='Full name parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileattendant',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM number responsible on site'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileparent',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM parent'),
),
migrations.AlterField(
model_name='cdsubscription',
name='mobileplayer',
field=models.CharField(blank=True, max_length=15, default='', verbose_name='GSM player'),
),
migrations.AlterField(
model_name='cdsubscription',
name='payamount',
field=models.IntegerField(blank=True, default=0, verbose_name='Amount to pay'),
),
migrations.AlterField(
model_name='cdsubscription',
name='paydate',
field=models.DateField(null=True, verbose_name='Payment date'),
),
]
|
cropr/bjk2017
|
cd_subscription/migrations/0005_auto_20161125_1908.py
|
Python
|
apache-2.0
| 2,446
|
# Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is more of a placeholder for now; we can add
# official color schemes in follow-ups.
import abc
import dataclasses
from typing import Iterable, List, Optional
import cirq
from cirq.protocols.circuit_diagram_info_protocol import CircuitDiagramInfoArgs
@dataclasses.dataclass
class SymbolInfo:
"""Organizes information about a symbol."""
labels: List[str]
colors: List[str]
@staticmethod
def unknown_operation(num_qubits: int) -> 'SymbolInfo':
"""Generates a SymbolInfo object for an unknown operation.
Args:
num_qubits: the number of qubits in the operation
"""
symbol_info = SymbolInfo([], [])
for _ in range(num_qubits):
symbol_info.colors.append('gray')
symbol_info.labels.append('?')
return symbol_info
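    # Illustrative: SymbolInfo.unknown_operation(2) yields labels ['?', '?']
    # and colors ['gray', 'gray'].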
class SymbolResolver(metaclass=abc.ABCMeta):
"""Abstract class providing the interface for users to specify information
about how a particular symbol should be displayed in the 3D circuit
"""
def __call__(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
return self.resolve(operation)
@abc.abstractmethod
def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
"""Converts cirq.Operation objects into SymbolInfo objects for serialization."""
class DefaultResolver(SymbolResolver):
"""Default symbol resolver implementation. Takes information
from circuit_diagram_info, if unavailable, returns information representing
an unknown symbol.
"""
_SYMBOL_COLORS = {
'@': 'black',
'H': 'yellow',
'I': 'orange',
'X': 'black',
'Y': 'pink',
'Z': 'cyan',
'S': '#90EE90',
'T': '#CBC3E3',
}
def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
"""Checks for the _circuit_diagram_info attribute of the operation,
and if it exists, build the symbol information from it. Otherwise,
builds symbol info for an unknown operation.
Args:
operation: the cirq.Operation object to resolve
"""
try:
info = cirq.circuit_diagram_info(operation)
except TypeError:
return SymbolInfo.unknown_operation(cirq.num_qubits(operation))
wire_symbols = info.wire_symbols
symbol_exponent = info._wire_symbols_including_formatted_exponent(
CircuitDiagramInfoArgs.UNINFORMED_DEFAULT
)
symbol_info = SymbolInfo(list(symbol_exponent), [])
for symbol in wire_symbols:
symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray'))
return symbol_info
DEFAULT_SYMBOL_RESOLVERS: Iterable[SymbolResolver] = tuple([DefaultResolver()])
def resolve_operation(operation: cirq.Operation, resolvers: Iterable[SymbolResolver]) -> SymbolInfo:
"""Builds a SymbolInfo object based off of a designated operation
and list of resolvers. The latest resolver takes precendent.
Args:
operation: the cirq.Operation object to resolve
resolvers: a list of SymbolResolvers which provides instructions
on how to build SymbolInfo objects.
Raises:
ValueError: if the operation cannot be resolved into a symbol.
"""
symbol_info = None
for resolver in resolvers:
info = resolver(operation)
if info is not None:
symbol_info = info
if symbol_info is None:
raise ValueError(f'Cannot resolve operation: {operation}')
return symbol_info
class Operation3DSymbol:
def __init__(self, wire_symbols, location_info, color_info, moment):
"""Gathers symbol information from an operation and builds an
object to represent it in 3D.
Args:
wire_symbols: a list of symbols taken from circuit_diagram_info()
that will be used to represent the operation in the 3D circuit.
location_info: A list of coordinates for each wire_symbol. The
index of the coordinate tuple in the location_info list must
correspond with the index of the symbol in the wire_symbols list.
color_info: a list representing the desired color of the symbol(s).
These will also correspond to index of the symbol in the
wire_symbols list.
moment: the moment where the symbol should be.
"""
self.wire_symbols = wire_symbols
self.location_info = location_info
self.color_info = color_info
self.moment = moment
def to_typescript(self):
return {
'wire_symbols': list(self.wire_symbols),
'location_info': self.location_info,
'color_info': self.color_info,
'moment': self.moment,
}
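# Usage sketch (illustrative, not part of the original module): resolve a CNOT
# with the default resolver chain defined above. circuit_diagram_info yields
# wire symbols ('@', 'X'), which _SYMBOL_COLORS maps to ('black', 'black').
if __name__ == '__main__':
    q0, q1 = cirq.LineQubit.range(2)
    info = resolve_operation(cirq.CNOT(q0, q1), DEFAULT_SYMBOL_RESOLVERS)
    print(info.labels, info.colors)  # ['@', 'X'] ['black', 'black']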
|
quantumlib/Cirq
|
cirq-web/cirq_web/circuits/symbols.py
|
Python
|
apache-2.0
| 5,383
|
#!/usr/bin/env python
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optparse
import subprocess
from maas_common import metric
from maas_common import metric_bool
from maas_common import print_output
from maas_common import status_err
from maas_common import status_ok
import requests
OVERVIEW_URL = "http://%s:%s/api/overview"
NODES_URL = "http://%s:%s/api/nodes"
CONNECTIONS_URL = "http://%s:%s/api/connections?columns=channels"
QUEUES_URL = "http://%s:%s/api/queues"
CLUSTERED = True
CLUSTER_SIZE = 3
# {metric_category: {metric_name: metric_unit}}
OVERVIEW_METRICS = {"queue_totals": {"messages": "messages",
"messages_ready": "messages",
"messages_unacknowledged": "messages"},
"message_stats": {"get": "messages",
"ack": "messages",
"deliver_get": "messages",
"deliver": "messages",
"publish": "messages"}}
# {metric_name: metric_unit}
NODES_METRICS = {"proc_used": "processes",
"proc_total": "processes",
"fd_used": "fd",
"fd_total": "fd",
"sockets_used": "fd",
"sockets_total": "fd",
"mem_used": "bytes",
"mem_limit": "bytes",
"mem_alarm": "status",
"disk_free_alarm": "status",
"uptime": "ms"}
CONNECTIONS_METRICS = {"max_channels_per_conn": "channels"}
def hostname():
"""Return the name of the current host/node."""
return subprocess.check_output(['hostname', '-s']).strip()
def rabbit_version(node):
if ('applications' in node and 'rabbit' in node['applications']
and 'version' in node['applications']['rabbit']):
version_string = node['applications']['rabbit']['version']
return tuple(int(part) for part in version_string.split('.'))
else:
return tuple()
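# Illustrative (hypothetical input): a node dict like
# {'applications': {'rabbit': {'version': '3.6.5'}}} yields (3, 6, 5), so the
# `> (3, 5)` check in _get_node_metrics selects 3.5.x and newer releases.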
def parse_args():
parser = optparse.OptionParser(
usage='%prog [-h] [-H hostname] [-P port] [-u username] [-p password]'
)
parser.add_option('-H', '--host', action='store', dest='host',
default='localhost',
help='Host address to use when connecting')
parser.add_option('-P', '--port', action='store', dest='port',
default='15672',
help='Port to use when connecting')
parser.add_option('-U', '--username', action='store', dest='username',
default='guest',
help='Username to use for authentication')
parser.add_option('-p', '--password', action='store', dest='password',
default='guest',
help='Password to use for authentication')
parser.add_option('-n', '--name', action='store', dest='name',
default=None,
help=("Check a node's cluster membership using the "
'provided name'))
return parser.parse_args()
def _get_rabbit_json(session, url):
try:
response = session.get(url)
except requests.exceptions.ConnectionError as e:
status_err(str(e))
if response.ok:
return response.json()
else:
status_err('Received status {0} from RabbitMQ API'.format(
response.status_code))
def _get_connection_metrics(session, metrics, host, port):
response = _get_rabbit_json(session, CONNECTIONS_URL % (host, port))
max_chans = max(connection['channels'] for connection in response
if 'channels' in connection)
for k in CONNECTIONS_METRICS:
metrics[k] = {'value': max_chans, 'unit': CONNECTIONS_METRICS[k]}
def _get_overview_metrics(session, metrics, host, port):
response = _get_rabbit_json(session, OVERVIEW_URL % (host, port))
for k in OVERVIEW_METRICS:
if k in response:
for a, b in OVERVIEW_METRICS[k].items():
if a in response[k]:
metrics[a] = {'value': response[k][a], 'unit': b}
def _get_node_metrics(session, metrics, host, port, name):
response = _get_rabbit_json(session, NODES_URL % (host, port))
# Either use the option provided by the commandline flag or the current
# hostname
name = '@' + (name or hostname())
is_cluster_member = False
# Ensure this node is a member of the cluster
nodes_matching_name = [n for n in response
if n['name'].endswith(name)]
is_cluster_member = any(nodes_matching_name)
if CLUSTERED:
if len(response) < CLUSTER_SIZE:
status_err('cluster too small')
if not is_cluster_member:
status_err('{0} not a member of the cluster'.format(name))
for k, v in NODES_METRICS.items():
metrics[k] = {'value': nodes_matching_name[0][k], 'unit': v}
# We don't know exactly which version introduces data for all
# nodes in the cluster returned by the NODES_URL, but we know it is
# in 3.5.x at least.
if rabbit_version(nodes_matching_name[0]) > (3, 5):
# Gather the queue lengths for all nodes in the cluster
queues = [n['run_queue'] for n in response
if n.get('run_queue', None)]
# Grab the first queue length
first = queues.pop()
# Check that all other queues are equal to it
if not all(first == q for q in queues):
# If they're not, the queues are not synchronized
status_err('Cluster not replicated across all nodes')
def _get_queue_metrics(session, metrics, host, port):
response = _get_rabbit_json(session, QUEUES_URL % (host, port))
notification_messages = sum([q['messages'] for q in response
if q['name'].startswith('notifications.')])
metrics['notification_messages'] = {
'value': notification_messages,
'unit': 'messages'
}
metrics['msgs_excl_notifications'] = {
'value': metrics['messages']['value'] - notification_messages,
'unit': 'messages'
}
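# Illustrative (hypothetical numbers): if the overview reported
# metrics['messages'] == 120 and the 'notifications.*' queues hold 30 messages
# in total, this records notification_messages=30 and
# msgs_excl_notifications=90.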
def main():
(options, _) = parse_args()
metrics = {}
session = requests.Session() # Make a Session to store the auth creds
session.auth = (options.username, options.password)
_get_connection_metrics(session, metrics, options.host, options.port)
_get_overview_metrics(session, metrics, options.host, options.port)
_get_node_metrics(session, metrics, options.host, options.port,
options.name)
_get_queue_metrics(session, metrics, options.host, options.port)
status_ok()
for k, v in metrics.items():
if v['value'] is True or v['value'] is False:
metric_bool('rabbitmq_%s_status' % k, not v['value'])
else:
metric('rabbitmq_%s' % k, 'int64', v['value'], v['unit'])
if __name__ == "__main__":
with print_output():
main()
|
stevelle/rpc-openstack
|
maas/plugins/rabbitmq_status.py
|
Python
|
apache-2.0
| 7,579
|
# -*- coding: utf-8 -*-
"""
Authors: Tim Hessels
UNESCO-IHE 2017
Contact: t.hessels@unesco-ihe.org
Repository: https://github.com/wateraccounting/wa
Module: Collect/JRC
Description:
This module downloads JRC water occurrence data from http://storage.googleapis.com/global-surface-water/downloads/.
Use the JRC.Occurrence function to
download and create a water occurrence image in Gtiff format.
The data represents the period 1984-2015.
Examples:
from wa.Collect import JRC
JRC.Occurrence(Dir='C:/Temp3/', latlim=[41, 45], lonlim=[-8, -5])
"""
from .Occurrence import main as Occurrence
__all__ = ['Occurrence']
__version__ = '0.1'
|
wateraccounting/wa
|
Collect/JRC/__init__.py
|
Python
|
apache-2.0
| 647
|
def handle(controller_slice):
    from core.models import ControllerSlice, Slice
    try:
        # The first character of backend_status encodes a numeric status code.
        my_status_code = int(controller_slice.backend_status[0])
        try:
            his_status_code = int(controller_slice.slice.backend_status[0])
        except:
            # Parent slice has no parseable status yet; treat it as 0.
            his_status_code = 0
        # Propagate a non-zero status to the parent slice when it differs.
        if (my_status_code not in [0, his_status_code]):
            controller_slice.slice.backend_status = controller_slice.backend_status
            controller_slice.slice.save(update_fields = ['backend_status'])
    except Exception, e:
        print str(e)
        pass
|
wathsalav/xos
|
xos/model_policies/model_policy_ControllerSlice.py
|
Python
|
apache-2.0
| 573
|
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import logging
import sys
import uuid
import fixtures
from oslo_serialization import jsonutils
from oslo_utils import strutils
from oslo_utils import timeutils
from stevedore import dispatch
from stevedore import extension
import testscenarios
import yaml
import oslo_messaging
from oslo_messaging.notify import _impl_log
from oslo_messaging.notify import _impl_test
from oslo_messaging.notify import messaging
from oslo_messaging.notify import notifier as msg_notifier
from oslo_messaging import serializer as msg_serializer
from oslo_messaging.tests import utils as test_utils
from six.moves import mock
load_tests = testscenarios.load_tests_apply_scenarios
class JsonMessageMatcher(object):
def __init__(self, message):
self.message = message
def __eq__(self, other):
return self.message == jsonutils.loads(other)
class _FakeTransport(object):
def __init__(self, conf):
self.conf = conf
def _send_notification(self, target, ctxt, message, version, retry=None):
pass
class _ReRaiseLoggedExceptionsFixture(fixtures.Fixture):
"""Record logged exceptions and re-raise in cleanup.
The notifier just logs notification send errors so, for the sake of
debugging test failures, we record any exceptions logged and re-raise them
during cleanup.
"""
class FakeLogger(object):
def __init__(self):
self.exceptions = []
def exception(self, msg, *args, **kwargs):
self.exceptions.append(sys.exc_info()[1])
def setUp(self):
super(_ReRaiseLoggedExceptionsFixture, self).setUp()
self.logger = self.FakeLogger()
def reraise_exceptions():
for ex in self.logger.exceptions:
raise ex
self.addCleanup(reraise_exceptions)
class TestMessagingNotifier(test_utils.BaseTestCase):
_v1 = [
('v1', dict(v1=True)),
('not_v1', dict(v1=False)),
]
_v2 = [
('v2', dict(v2=True)),
('not_v2', dict(v2=False)),
]
_publisher_id = [
('ctor_pub_id', dict(ctor_pub_id='test',
expected_pub_id='test')),
('prep_pub_id', dict(prep_pub_id='test.localhost',
expected_pub_id='test.localhost')),
('override', dict(ctor_pub_id='test',
prep_pub_id='test.localhost',
expected_pub_id='test.localhost')),
]
_topics = [
('no_topics', dict(topics=[])),
('single_topic', dict(topics=['notifications'])),
('multiple_topic2', dict(topics=['foo', 'bar'])),
]
_priority = [
('audit', dict(priority='audit')),
('debug', dict(priority='debug')),
('info', dict(priority='info')),
('warn', dict(priority='warn')),
('error', dict(priority='error')),
('sample', dict(priority='sample')),
('critical', dict(priority='critical')),
]
_payload = [
('payload', dict(payload={'foo': 'bar'})),
]
_context = [
('ctxt', dict(ctxt={'user': 'bob'})),
]
_retry = [
('unconfigured', dict()),
('None', dict(retry=None)),
('0', dict(retry=0)),
('5', dict(retry=5)),
]
@classmethod
def generate_scenarios(cls):
cls.scenarios = testscenarios.multiply_scenarios(cls._v1,
cls._v2,
cls._publisher_id,
cls._topics,
cls._priority,
cls._payload,
cls._context,
cls._retry)
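    # Note (illustrative): multiply_scenarios crosses every axis above, so
    # this class expands to 2 * 2 * 3 * 3 * 7 * 1 * 1 * 4 = 1008 generated
    # test scenarios.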
def setUp(self):
super(TestMessagingNotifier, self).setUp()
self.logger = self.useFixture(_ReRaiseLoggedExceptionsFixture()).logger
self.stubs.Set(messaging, 'LOG', self.logger)
self.stubs.Set(msg_notifier, '_LOG', self.logger)
@mock.patch('oslo_utils.timeutils.utcnow')
def test_notifier(self, mock_utcnow):
drivers = []
if self.v1:
drivers.append('messaging')
if self.v2:
drivers.append('messagingv2')
self.config(driver=drivers,
topics=self.topics,
group='oslo_messaging_notifications')
transport = _FakeTransport(self.conf)
if hasattr(self, 'ctor_pub_id'):
notifier = oslo_messaging.Notifier(transport,
publisher_id=self.ctor_pub_id)
else:
notifier = oslo_messaging.Notifier(transport)
prepare_kwds = {}
if hasattr(self, 'retry'):
prepare_kwds['retry'] = self.retry
if hasattr(self, 'prep_pub_id'):
prepare_kwds['publisher_id'] = self.prep_pub_id
if prepare_kwds:
notifier = notifier.prepare(**prepare_kwds)
transport._send_notification = mock.Mock()
message_id = uuid.uuid4()
uuid.uuid4 = mock.Mock(return_value=message_id)
mock_utcnow.return_value = datetime.datetime.utcnow()
message = {
'message_id': str(message_id),
'publisher_id': self.expected_pub_id,
'event_type': 'test.notify',
'priority': self.priority.upper(),
'payload': self.payload,
'timestamp': str(timeutils.utcnow()),
}
sends = []
if self.v1:
sends.append(dict(version=1.0))
if self.v2:
sends.append(dict(version=2.0))
calls = []
for send_kwargs in sends:
for topic in self.topics:
if hasattr(self, 'retry'):
send_kwargs['retry'] = self.retry
else:
send_kwargs['retry'] = None
target = oslo_messaging.Target(topic='%s.%s' % (topic,
self.priority))
calls.append(mock.call(target,
self.ctxt,
message,
**send_kwargs))
method = getattr(notifier, self.priority)
method(self.ctxt, 'test.notify', self.payload)
uuid.uuid4.assert_called_once_with()
transport._send_notification.assert_has_calls(calls, any_order=True)
TestMessagingNotifier.generate_scenarios()
class TestSerializer(test_utils.BaseTestCase):
def setUp(self):
super(TestSerializer, self).setUp()
self.addCleanup(_impl_test.reset)
@mock.patch('oslo_utils.timeutils.utcnow')
def test_serializer(self, mock_utcnow):
transport = _FakeTransport(self.conf)
serializer = msg_serializer.NoOpSerializer()
notifier = oslo_messaging.Notifier(transport,
'test.localhost',
driver='test',
topic='test',
serializer=serializer)
message_id = uuid.uuid4()
uuid.uuid4 = mock.Mock(return_value=message_id)
mock_utcnow.return_value = datetime.datetime.utcnow()
serializer.serialize_context = mock.Mock()
serializer.serialize_context.return_value = dict(user='alice')
serializer.serialize_entity = mock.Mock()
serializer.serialize_entity.return_value = 'sbar'
notifier.info(dict(user='bob'), 'test.notify', 'bar')
message = {
'message_id': str(message_id),
'publisher_id': 'test.localhost',
'event_type': 'test.notify',
'priority': 'INFO',
'payload': 'sbar',
'timestamp': str(timeutils.utcnow()),
}
self.assertEqual([(dict(user='alice'), message, 'INFO', None)],
_impl_test.NOTIFICATIONS)
uuid.uuid4.assert_called_once_with()
serializer.serialize_context.assert_called_once_with(dict(user='bob'))
serializer.serialize_entity.assert_called_once_with(dict(user='bob'),
'bar')
class TestNotifierTopics(test_utils.BaseTestCase):
def test_topics_from_config(self):
self.config(driver=['log'],
group='oslo_messaging_notifications')
self.config(topics=['topic1', 'topic2'],
group='oslo_messaging_notifications')
transport = _FakeTransport(self.conf)
notifier = oslo_messaging.Notifier(transport, 'test.localhost')
self.assertEqual(['topic1', 'topic2'], notifier._topics)
def test_topics_from_kwargs(self):
self.config(driver=['log'],
group='oslo_messaging_notifications')
transport = _FakeTransport(self.conf)
notifier = oslo_messaging.Notifier(transport, 'test.localhost',
topic='topic1')
self.assertEqual(['topic1'], notifier._topics)
notifier = oslo_messaging.Notifier(transport, 'test.localhost',
topics=['topic1', 'topic2'])
self.assertEqual(['topic1', 'topic2'], notifier._topics)
class TestLogNotifier(test_utils.BaseTestCase):
@mock.patch('oslo_utils.timeutils.utcnow')
def test_notifier(self, mock_utcnow):
self.config(driver=['log'],
group='oslo_messaging_notifications')
transport = _FakeTransport(self.conf)
notifier = oslo_messaging.Notifier(transport, 'test.localhost')
message_id = uuid.uuid4()
uuid.uuid4 = mock.Mock()
uuid.uuid4.return_value = message_id
mock_utcnow.return_value = datetime.datetime.utcnow()
message = {
'message_id': str(message_id),
'publisher_id': 'test.localhost',
'event_type': 'test.notify',
'priority': 'INFO',
'payload': 'bar',
'timestamp': str(timeutils.utcnow()),
}
logger = mock.Mock()
logging.getLogger = mock.Mock()
logging.getLogger.return_value = logger
notifier.info({}, 'test.notify', 'bar')
uuid.uuid4.assert_called_once_with()
logging.getLogger.assert_called_once_with('oslo.messaging.'
'notification.test.notify')
logger.info.assert_called_once_with(JsonMessageMatcher(message))
def test_sample_priority(self):
# Ensure logger drops sample-level notifications.
driver = _impl_log.LogDriver(None, None, None)
logger = mock.Mock(spec=logging.getLogger('oslo.messaging.'
'notification.foo'))
logger.sample = None
logging.getLogger = mock.Mock()
logging.getLogger.return_value = logger
msg = {'event_type': 'foo'}
driver.notify(None, msg, "sample", None)
logging.getLogger.assert_called_once_with('oslo.messaging.'
'notification.foo')
def test_mask_passwords(self):
# Ensure that passwords are masked with notifications
driver = _impl_log.LogDriver(None, None, None)
logger = mock.MagicMock()
logger.info = mock.MagicMock()
message = {'password': 'passw0rd', 'event_type': 'foo'}
mask_str = jsonutils.dumps(strutils.mask_dict_password(message))
with mock.patch.object(logging, 'getLogger') as gl:
gl.return_value = logger
driver.notify(None, message, 'info', 0)
logger.info.assert_called_once_with(mask_str)
class TestRoutingNotifier(test_utils.BaseTestCase):
def setUp(self):
super(TestRoutingNotifier, self).setUp()
self.config(driver=['routing'],
group='oslo_messaging_notifications')
transport = _FakeTransport(self.conf)
self.notifier = oslo_messaging.Notifier(transport)
self.router = self.notifier._driver_mgr['routing'].obj
def _fake_extension_manager(self, ext):
return extension.ExtensionManager.make_test_instance(
[extension.Extension('test', None, None, ext), ])
def _empty_extension_manager(self):
return extension.ExtensionManager.make_test_instance([])
def test_should_load_plugin(self):
self.router.used_drivers = set(["zoo", "blah"])
ext = mock.MagicMock()
ext.name = "foo"
self.assertFalse(self.router._should_load_plugin(ext))
ext.name = "zoo"
self.assertTrue(self.router._should_load_plugin(ext))
def test_load_notifiers_no_config(self):
# default routing_config=""
self.router._load_notifiers()
self.assertEqual({}, self.router.routing_groups)
self.assertEqual(0, len(self.router.used_drivers))
def test_load_notifiers_no_extensions(self):
self.config(routing_config="routing_notifier.yaml",
group='oslo_messaging_notifications')
routing_config = r""
config_file = mock.MagicMock()
config_file.return_value = routing_config
with mock.patch.object(self.router, '_get_notifier_config_file',
config_file):
with mock.patch('stevedore.dispatch.DispatchExtensionManager',
return_value=self._empty_extension_manager()):
with mock.patch('oslo_messaging.notify.'
'_impl_routing.LOG') as mylog:
self.router._load_notifiers()
self.assertFalse(mylog.debug.called)
self.assertEqual({}, self.router.routing_groups)
def test_load_notifiers_config(self):
self.config(routing_config="routing_notifier.yaml",
group='oslo_messaging_notifications')
routing_config = r"""
group_1:
rpc : foo
group_2:
rpc : blah
"""
config_file = mock.MagicMock()
config_file.return_value = routing_config
with mock.patch.object(self.router, '_get_notifier_config_file',
config_file):
with mock.patch('stevedore.dispatch.DispatchExtensionManager',
return_value=self._fake_extension_manager(
mock.MagicMock())):
self.router._load_notifiers()
groups = list(self.router.routing_groups.keys())
groups.sort()
self.assertEqual(['group_1', 'group_2'], groups)
def test_get_drivers_for_message_accepted_events(self):
config = r"""
group_1:
rpc:
accepted_events:
- foo.*
- blah.zoo.*
- zip
"""
groups = yaml.safe_load(config)
group = groups['group_1']
# No matching event ...
self.assertEqual([],
self.router._get_drivers_for_message(
group, "unknown", "info"))
# Child of foo ...
self.assertEqual(['rpc'],
self.router._get_drivers_for_message(
group, "foo.1", "info"))
# Foo itself ...
self.assertEqual([],
self.router._get_drivers_for_message(
group, "foo", "info"))
# Child of blah.zoo
self.assertEqual(['rpc'],
self.router._get_drivers_for_message(
group, "blah.zoo.zing", "info"))
def test_get_drivers_for_message_accepted_priorities(self):
config = r"""
group_1:
rpc:
accepted_priorities:
- info
- error
"""
groups = yaml.safe_load(config)
group = groups['group_1']
# No matching priority
self.assertEqual([],
self.router._get_drivers_for_message(
group, None, "unknown"))
# Info ...
self.assertEqual(['rpc'],
self.router._get_drivers_for_message(
group, None, "info"))
# Error (to make sure the list is getting processed) ...
self.assertEqual(['rpc'],
self.router._get_drivers_for_message(
group, None, "error"))
def test_get_drivers_for_message_both(self):
config = r"""
group_1:
rpc:
accepted_priorities:
- info
accepted_events:
- foo.*
driver_1:
accepted_priorities:
- info
driver_2:
accepted_events:
- foo.*
"""
groups = yaml.safe_load(config)
group = groups['group_1']
# Valid event, but no matching priority
self.assertEqual(['driver_2'],
self.router._get_drivers_for_message(
group, 'foo.blah', "unknown"))
# Valid priority, but no matching event
self.assertEqual(['driver_1'],
self.router._get_drivers_for_message(
group, 'unknown', "info"))
# Happy day ...
x = self.router._get_drivers_for_message(group, 'foo.blah', "info")
x.sort()
self.assertEqual(['driver_1', 'driver_2', 'rpc'], x)
def test_filter_func(self):
ext = mock.MagicMock()
ext.name = "rpc"
# Good ...
self.assertTrue(self.router._filter_func(ext, {}, {}, 'info',
None, ['foo', 'rpc']))
# Bad
self.assertFalse(self.router._filter_func(ext, {}, {}, 'info',
None, ['foo']))
def test_notify(self):
self.router.routing_groups = {'group_1': None, 'group_2': None}
drivers_mock = mock.MagicMock()
drivers_mock.side_effect = [['rpc'], ['foo']]
with mock.patch.object(self.router, 'plugin_manager') as pm:
with mock.patch.object(self.router, '_get_drivers_for_message',
drivers_mock):
self.notifier.info({}, 'my_event', {})
self.assertEqual(sorted(['rpc', 'foo']),
sorted(pm.map.call_args[0][6]))
def test_notify_filtered(self):
self.config(routing_config="routing_notifier.yaml",
group='oslo_messaging_notifications')
routing_config = r"""
group_1:
rpc:
accepted_events:
- my_event
rpc2:
accepted_priorities:
- info
bar:
accepted_events:
- nothing
"""
config_file = mock.MagicMock()
config_file.return_value = routing_config
rpc_driver = mock.Mock()
rpc2_driver = mock.Mock()
bar_driver = mock.Mock()
pm = dispatch.DispatchExtensionManager.make_test_instance(
[extension.Extension('rpc', None, None, rpc_driver),
extension.Extension('rpc2', None, None, rpc2_driver),
extension.Extension('bar', None, None, bar_driver)],
)
with mock.patch.object(self.router, '_get_notifier_config_file',
config_file):
with mock.patch('stevedore.dispatch.DispatchExtensionManager',
return_value=pm):
self.notifier.info({}, 'my_event', {})
self.assertFalse(bar_driver.info.called)
rpc_driver.notify.assert_called_once_with(
{}, mock.ANY, 'INFO', None)
rpc2_driver.notify.assert_called_once_with(
{}, mock.ANY, 'INFO', None)
|
ozamiatin/oslo.messaging
|
oslo_messaging/tests/notify/test_notifier.py
|
Python
|
apache-2.0
| 20,524
|
from __future__ import absolute_import
import json
from changes.config import db
from changes.constants import Result
from changes.models.jobplan import JobPlan
from changes.utils.http import build_web_uri
from .base import ArtifactHandler, ArtifactParseError
class CollectionArtifactHandler(ArtifactHandler):
"""
Base class artifact handler for collection (jobs.json and tests.json) files.
Does the required job expansion. Subclasses are expected to set
cls.FILENAMES to the handleable files in question.
"""
def process(self, fp, artifact):
try:
phase_config = json.load(fp)
except ValueError:
uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))
self.logger.warning('Failed to parse json; (step=%s, build=%s)', self.step.id.hex, uri, exc_info=True)
self.report_malformed()
else:
_, implementation = JobPlan.get_build_step_for_job(job_id=self.step.job_id)
try:
implementation.expand_jobs(self.step, phase_config)
except ArtifactParseError:
uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))
self.logger.warning('malformed %s artifact (step=%s, build=%s)', self.FILENAMES[0],
self.step.id.hex, uri, exc_info=True)
self.report_malformed()
except Exception:
uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))
self.logger.warning('expand_jobs failed (step=%s, build=%s)', self.step.id.hex, uri, exc_info=True)
self.step.result = Result.infra_failed
db.session.add(self.step)
db.session.commit()
class TestsJsonHandler(CollectionArtifactHandler):
# only match in the root directory
FILENAMES = ('/tests.json',)
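# Illustrative sketch (hypothetical subclass, not part of the original
# module): handling jobs.json only requires pointing FILENAMES elsewhere:
#
#     class JobsJsonHandler(CollectionArtifactHandler):
#         FILENAMES = ('/jobs.json',)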
|
dropbox/changes
|
changes/artifacts/collection_artifact.py
|
Python
|
apache-2.0
| 1,917
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Fri Dec 2 15:05:18 2011 by generateDS.py version 2.7b.
#
import sys
import getopt
import re as re_
etree_ = None
Verbose_import_ = False
( XMLParser_import_none, XMLParser_import_lxml,
XMLParser_import_elementtree
) = range(3)
XMLParser_import_library = None
try:
# lxml
from lxml import etree as etree_
XMLParser_import_library = XMLParser_import_lxml
if Verbose_import_:
print("running with lxml.etree")
except ImportError:
try:
# cElementTree from Python 2.5+
import xml.etree.cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree on Python 2.5+")
except ImportError:
try:
# ElementTree from Python 2.5+
import xml.etree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree on Python 2.5+")
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree")
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree")
except ImportError:
raise ImportError("Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
if (XMLParser_import_library == XMLParser_import_lxml and
'parser' not in kwargs):
# Use the lxml ElementTree compatible parser so that, e.g.,
# we ignore comments.
kwargs['parser'] = etree_.ETCompatXMLParser()
doc = etree_.parse(*args, **kwargs)
return doc
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError, exp:
class GeneratedsSuper(object):
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_integer_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of integers')
return input_data
def gds_format_float(self, input_data, input_name=''):
return '%f' % input_data
def gds_validate_float(self, input_data, node, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_float_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of floats')
return input_data
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_double_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of doubles')
return input_data
def gds_format_boolean(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_boolean(self, input_data, node, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_boolean_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(node, 'Requires sequence of booleans ("true", "1", "false", "0")')
return input_data
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
return None
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'ascii'
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
#
# Support/utility functions.
#
def showIndent(outfile, level):
for idx in range(level):
outfile.write(' ')
def quote_xml(inStr):
if not inStr:
return ''
s1 = (isinstance(inStr, basestring) and inStr or
'%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, basestring) and inStr or
'%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', '&quot;')
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
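# Illustrative: quote_attrib('plain') -> '"plain"'; quote_attrib('a"b') falls
# back to single quotes; values containing both quote kinds have '"' escaped
# as &quot; and are wrapped in double quotes.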
def quote_python(inStr):
s1 = inStr
if s1.find("'") == -1:
if s1.find('\n') == -1:
return "'%s'" % s1
else:
return "'''%s'''" % s1
else:
if s1.find('"') != -1:
s1 = s1.replace('"', '\\"')
if s1.find('\n') == -1:
return '"%s"' % s1
else:
return '"""%s"""' % s1
def get_all_text_(node):
if node.text is not None:
text = node.text
else:
text = ''
for child in node:
if child.tail is not None:
text += child.tail
return text
def find_attr_value_(attr_name, node):
attrs = node.attrib
attr_parts = attr_name.split(':')
value = None
if len(attr_parts) == 1:
value = attrs.get(attr_name)
elif len(attr_parts) == 2:
prefix, name = attr_parts
namespace = node.nsmap.get(prefix)
if namespace is not None:
value = attrs.get('{%s}%s' % (namespace, name, ))
return value
class GDSParseError(Exception):
pass
def raise_parse_error(node, msg):
if XMLParser_import_library == XMLParser_import_lxml:
msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
else:
msg = '%s (element %s)' % (msg, node.tag, )
raise GDSParseError(msg)
class MixedContainer:
# Constants for category:
CategoryNone = 0
CategoryText = 1
CategorySimple = 2
CategoryComplex = 3
# Constants for content_type:
TypeNone = 0
TypeText = 1
TypeString = 2
TypeInteger = 3
TypeFloat = 4
TypeDecimal = 5
TypeDouble = 6
TypeBoolean = 7
def __init__(self, category, content_type, name, value):
self.category = category
self.content_type = content_type
self.name = name
self.value = value
def getCategory(self):
return self.category
def getContenttype(self, content_type):
return self.content_type
def getValue(self):
return self.value
def getName(self):
return self.name
def export(self, outfile, level, name, namespace):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
outfile.write(self.value)
elif self.category == MixedContainer.CategorySimple:
self.exportSimple(outfile, level, name)
else: # category == MixedContainer.CategoryComplex
self.value.export(outfile, level, namespace,name)
def exportSimple(self, outfile, level, name):
if self.content_type == MixedContainer.TypeString:
outfile.write('<%s>%s</%s>' % (self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeInteger or \
self.content_type == MixedContainer.TypeBoolean:
outfile.write('<%s>%d</%s>' % (self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeFloat or \
self.content_type == MixedContainer.TypeDecimal:
outfile.write('<%s>%f</%s>' % (self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeDouble:
outfile.write('<%s>%g</%s>' % (self.name, self.value, self.name))
def exportLiteral(self, outfile, level, name):
if self.category == MixedContainer.CategoryText:
showIndent(outfile, level)
outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' % \
(self.category, self.content_type, self.name, self.value))
elif self.category == MixedContainer.CategorySimple:
showIndent(outfile, level)
outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' % \
(self.category, self.content_type, self.name, self.value))
else: # category == MixedContainer.CategoryComplex
showIndent(outfile, level)
outfile.write('model_.MixedContainer(%d, %d, "%s",\n' % \
(self.category, self.content_type, self.name,))
self.value.exportLiteral(outfile, level + 1)
showIndent(outfile, level)
outfile.write(')\n')
class MemberSpec_(object):
def __init__(self, name='', data_type='', container=0):
self.name = name
self.data_type = data_type
self.container = container
def set_name(self, name): self.name = name
def get_name(self): return self.name
def set_data_type(self, data_type): self.data_type = data_type
def get_data_type_chain(self): return self.data_type
def get_data_type(self):
if isinstance(self.data_type, list):
if len(self.data_type) > 0:
return self.data_type[-1]
else:
return 'xs:string'
else:
return self.data_type
def set_container(self, container): self.container = container
def get_container(self): return self.container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
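# Illustrative: _cast(int, '512') -> 512, _cast(None, 'abc') -> 'abc', and
# None values pass through unchanged regardless of typ.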
#
# Data representation classes.
#
class EnvelopeType(GeneratedsSuper):
"""Root OVF descriptor type"""
subclass = None
superclass = None
def __init__(self, lang='en-US', References=None, Section=None, Content=None, Strings=None):
self.lang = _cast(None, lang)
self.References = References
if Section is None:
self.Section = []
else:
self.Section = Section
self.Content = Content
if Strings is None:
self.Strings = []
else:
self.Strings = Strings
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if EnvelopeType.subclass:
return EnvelopeType.subclass(*args_, **kwargs_)
else:
return EnvelopeType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_References(self): return self.References
def set_References(self, References): self.References = References
def get_Section(self): return self.Section
def set_Section(self, Section): self.Section = Section
def add_Section(self, value): self.Section.append(value)
def insert_Section(self, index, value): self.Section[index] = value
def get_Content(self): return self.Content
def set_Content(self, Content): self.Content = Content
def get_Strings(self): return self.Strings
def set_Strings(self, Strings): self.Strings = Strings
def add_Strings(self, value): self.Strings.append(value)
def insert_Strings(self, index, value): self.Strings[index] = value
def get_lang(self): return self.lang
def set_lang(self, lang): self.lang = lang
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='EnvelopeType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='EnvelopeType')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='EnvelopeType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.lang is not None and 'lang' not in already_processed:
already_processed.append('lang')
outfile.write(' lang=%s' % (self.gds_format_string(quote_attrib(self.lang).encode(ExternalEncoding), input_name='lang'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='EnvelopeType', fromsubclass_=False):
if self.References is not None:
self.References.export(outfile, level, namespace_, name_='References', )
for Section_ in self.Section:
Section_.export(outfile, level, namespace_, name_='Section')
if self.Content is not None:
self.Content.export(outfile, level, namespace_, name_='Content', )
for Strings_ in self.Strings:
Strings_.export(outfile, level, namespace_, name_='Strings')
def hasContent_(self):
if (
self.References is not None or
self.Section or
self.Content is not None or
self.Strings
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='EnvelopeType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.lang is not None and 'lang' not in already_processed:
already_processed.append('lang')
showIndent(outfile, level)
outfile.write('lang = "%s",\n' % (self.lang,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.References is not None:
showIndent(outfile, level)
outfile.write('References=model_.References_Type(\n')
self.References.exportLiteral(outfile, level, name_='References')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('Section=[\n')
level += 1
for Section_ in self.Section:
showIndent(outfile, level)
outfile.write('model_.Section(\n')
Section_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
if self.Content is not None:
showIndent(outfile, level)
outfile.write('Content=model_.Content(\n')
self.Content.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('Strings=[\n')
level += 1
for Strings_ in self.Strings:
showIndent(outfile, level)
outfile.write('model_.Strings_Type(\n')
Strings_.exportLiteral(outfile, level, name_='Strings_Type')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('lang', node)
if value is not None and 'lang' not in already_processed:
already_processed.append('lang')
self.lang = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'References':
obj_ = References_Type.factory()
obj_.build(child_)
self.set_References(obj_)
elif nodeName_ == 'Section':
class_obj_ = self.get_class_obj_(child_, Section_Type)
obj_ = class_obj_.factory()
obj_.build(child_)
self.Section.append(obj_)
elif nodeName_ == 'Content':
class_obj_ = self.get_class_obj_(child_, Content_Type)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Content(obj_)
elif nodeName_ == 'Strings':
obj_ = Strings_Type.factory()
obj_.build(child_)
self.Strings.append(obj_)
# end class EnvelopeType
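# Usage sketch (illustrative, not part of the generated module): an empty
# envelope serializes to a self-closing element via export():
#
#     env = EnvelopeType(lang='en-US')
#     env.export(sys.stdout, 0, namespace_='ovf:', name_='Envelope')
#     # -> <ovf:Envelope lang="en-US"/>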
class References_Type(GeneratedsSuper):
"""Type for list of external resources"""
subclass = None
superclass = None
def __init__(self, File=None, anytypeobjs_=None):
if File is None:
self.File = []
else:
self.File = File
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if References_Type.subclass:
return References_Type.subclass(*args_, **kwargs_)
else:
return References_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_File(self): return self.File
def set_File(self, File): self.File = File
def add_File(self, value): self.File.append(value)
def insert_File(self, index, value): self.File[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='References_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='References_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='References_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='References_Type', fromsubclass_=False):
for File_ in self.File:
File_.export(outfile, level, namespace_, name_='File')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.File or
self.anytypeobjs_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='References_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('File=[\n')
level += 1
for File_ in self.File:
showIndent(outfile, level)
outfile.write('model_.File_Type(\n')
File_.exportLiteral(outfile, level, name_='File_Type')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'File':
obj_ = File_Type.factory()
obj_.build(child_)
self.File.append(obj_)
else:
obj_ = self.gds_build_any(child_, 'References_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class References_Type
class File_Type(GeneratedsSuper):
"""Type for an external reference to a resourceReference key used in
other parts of the packageLocation of external resourceSize in
bytes of the files (if known)Compression type (gzip, bzip2, or
none if empty or not specified)Chunk size (except for last
chunk)"""
subclass = None
superclass = None
def __init__(self, compression='', href=None, chunkSize=None, id=None, size=None, anytypeobjs_=None):
self.compression = _cast(None, compression)
self.href = _cast(None, href)
self.chunkSize = _cast(int, chunkSize)
self.id = _cast(None, id)
self.size = _cast(int, size)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if File_Type.subclass:
return File_Type.subclass(*args_, **kwargs_)
else:
return File_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_compression(self): return self.compression
def set_compression(self, compression): self.compression = compression
def get_href(self): return self.href
def set_href(self, href): self.href = href
def get_chunkSize(self): return self.chunkSize
def set_chunkSize(self, chunkSize): self.chunkSize = chunkSize
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_size(self): return self.size
def set_size(self, size): self.size = size
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='File_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='File_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='File_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.compression is not None and 'compression' not in already_processed:
already_processed.append('compression')
outfile.write(' compression=%s' % (self.gds_format_string(quote_attrib(self.compression).encode(ExternalEncoding), input_name='compression'), ))
if self.href is not None and 'href' not in already_processed:
already_processed.append('href')
outfile.write(' href=%s' % (self.gds_format_string(quote_attrib(self.href).encode(ExternalEncoding), input_name='href'), ))
if self.chunkSize is not None and 'chunkSize' not in already_processed:
already_processed.append('chunkSize')
outfile.write(' chunkSize="%s"' % self.gds_format_integer(self.chunkSize, input_name='chunkSize'))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
if self.size is not None and 'size' not in already_processed:
already_processed.append('size')
outfile.write(' size="%s"' % self.gds_format_integer(self.size, input_name='size'))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='File_Type', fromsubclass_=False):
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.anytypeobjs_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='File_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.compression is not None and 'compression' not in already_processed:
already_processed.append('compression')
showIndent(outfile, level)
outfile.write('compression = "%s",\n' % (self.compression,))
if self.href is not None and 'href' not in already_processed:
already_processed.append('href')
showIndent(outfile, level)
outfile.write('href = "%s",\n' % (self.href,))
if self.chunkSize is not None and 'chunkSize' not in already_processed:
already_processed.append('chunkSize')
showIndent(outfile, level)
outfile.write('chunkSize = %d,\n' % (self.chunkSize,))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = "%s",\n' % (self.id,))
if self.size is not None and 'size' not in already_processed:
already_processed.append('size')
showIndent(outfile, level)
outfile.write('size = %d,\n' % (self.size,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('compression', node)
if value is not None and 'compression' not in already_processed:
already_processed.append('compression')
self.compression = value
value = find_attr_value_('href', node)
if value is not None and 'href' not in already_processed:
already_processed.append('href')
self.href = value
value = find_attr_value_('chunkSize', node)
if value is not None and 'chunkSize' not in already_processed:
already_processed.append('chunkSize')
try:
self.chunkSize = int(value)
        except ValueError as exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
self.id = value
value = find_attr_value_('size', node)
if value is not None and 'size' not in already_processed:
already_processed.append('size')
try:
self.size = int(value)
        except ValueError as exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_ = self.gds_build_any(child_, 'File_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class File_Type
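# Hand-written usage sketch, not generateDS output: serializing a File_Type.
# Assumes the generated keyword constructor matches the attributes handled in
# exportAttributes above (href, id, size, compression, chunkSize).
def _example_export_file_type(outfile):
    # Describe one file from the OVF References section and emit it as an
    # <ovf:File .../> element at indent level 1.
    ref = File_Type(href='disk1.vmdk', id='file1', size=123456)
    ref.export(outfile, 1, namespace_='ovf:', name_='File')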
class Content_Type(GeneratedsSuper):
"""Base class for content"""
subclass = None
superclass = None
def __init__(self, id=None, Info=None, Name=None, Section=None, extensiontype_=None):
self.id = _cast(None, id)
self.Info = Info
self.Name = Name
if Section is None:
self.Section = []
else:
self.Section = Section
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if Content_Type.subclass:
return Content_Type.subclass(*args_, **kwargs_)
else:
return Content_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Info(self): return self.Info
def set_Info(self, Info): self.Info = Info
def get_Name(self): return self.Name
def set_Name(self, Name): self.Name = Name
def get_Section(self): return self.Section
def set_Section(self, Section): self.Section = Section
def add_Section(self, value): self.Section.append(value)
def insert_Section(self, index, value): self.Section[index] = value
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='Content_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='Content_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Content_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
def exportChildren(self, outfile, level, namespace_='ovf:', name_='Content_Type', fromsubclass_=False):
if self.Info is not None:
self.Info.export(outfile, level, namespace_, name_='Info', )
if self.Name is not None:
self.Name.export(outfile, level, namespace_, name_='Name')
for Section_ in self.Section:
Section_.export(outfile, level, namespace_, name_='Section')
def hasContent_(self):
if (
self.Info is not None or
self.Name is not None or
self.Section
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='Content_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = "%s",\n' % (self.id,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.Info is not None:
showIndent(outfile, level)
outfile.write('Info=model_.Msg_Type(\n')
self.Info.exportLiteral(outfile, level, name_='Info')
showIndent(outfile, level)
outfile.write('),\n')
if self.Name is not None:
showIndent(outfile, level)
outfile.write('Name=model_.Msg_Type(\n')
self.Name.exportLiteral(outfile, level, name_='Name')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('Section=[\n')
level += 1
for Section_ in self.Section:
showIndent(outfile, level)
outfile.write('model_.Section(\n')
Section_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
self.id = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Info':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Info(obj_)
elif nodeName_ == 'Name':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Name(obj_)
elif nodeName_ == 'Section':
class_obj_ = self.get_class_obj_(child_, Section_Type)
obj_ = class_obj_.factory()
obj_.build(child_)
self.Section.append(obj_)
# end class Content_Type
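# Hand-written sketch, not generateDS output: Content_Type carries an
# extensiontype_ slot so subtypes round-trip through xsi:type. Setting it by
# hand, as below, makes export() emit the xsi namespace declaration and an
# xsi:type attribute, mirroring what buildAttributes reads back.
def _example_export_typed_content(outfile):
    content = Content_Type(id='vm-1', Info=Msg_Type(valueOf_='Some content'))
    content.set_extensiontype_('ovf:VirtualSystem_Type')
    content.export(outfile, 0, name_='Content')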
class VirtualSystem_Type(Content_Type):
"""Content describing a virtual system"""
subclass = None
superclass = Content_Type
def __init__(self, id=None, Info=None, Name=None, Section=None):
super(VirtualSystem_Type, self).__init__(id, Info, Name, Section, )
pass
def factory(*args_, **kwargs_):
if VirtualSystem_Type.subclass:
return VirtualSystem_Type.subclass(*args_, **kwargs_)
else:
return VirtualSystem_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def export(self, outfile, level, namespace_='ovf:', name_='VirtualSystem_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualSystem_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VirtualSystem_Type'):
super(VirtualSystem_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualSystem_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='VirtualSystem_Type', fromsubclass_=False):
super(VirtualSystem_Type, self).exportChildren(outfile, level, namespace_, name_, True)
def hasContent_(self):
if (
super(VirtualSystem_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='VirtualSystem_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(VirtualSystem_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(VirtualSystem_Type, self).exportLiteralChildren(outfile, level, name_)
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(VirtualSystem_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(VirtualSystem_Type, self).buildChildren(child_, node, nodeName_, True)
pass
# end class VirtualSystem_Type
class VirtualSystemCollection_Type(Content_Type):
"""A collection of Content."""
subclass = None
superclass = Content_Type
def __init__(self, id=None, Info=None, Name=None, Section=None, Content=None):
super(VirtualSystemCollection_Type, self).__init__(id, Info, Name, Section, )
if Content is None:
self.Content = []
else:
self.Content = Content
def factory(*args_, **kwargs_):
if VirtualSystemCollection_Type.subclass:
return VirtualSystemCollection_Type.subclass(*args_, **kwargs_)
else:
return VirtualSystemCollection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Content(self): return self.Content
def set_Content(self, Content): self.Content = Content
def add_Content(self, value): self.Content.append(value)
def insert_Content(self, index, value): self.Content[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='VirtualSystemCollection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualSystemCollection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VirtualSystemCollection_Type'):
super(VirtualSystemCollection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualSystemCollection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='VirtualSystemCollection_Type', fromsubclass_=False):
super(VirtualSystemCollection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for Content_ in self.Content:
Content_.export(outfile, level, namespace_, name_='Content')
def hasContent_(self):
if (
self.Content or
super(VirtualSystemCollection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='VirtualSystemCollection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(VirtualSystemCollection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(VirtualSystemCollection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('Content=[\n')
level += 1
for Content_ in self.Content:
showIndent(outfile, level)
outfile.write('model_.Content(\n')
Content_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(VirtualSystemCollection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Content':
class_obj_ = self.get_class_obj_(child_, Content_Type)
obj_ = class_obj_.factory()
obj_.build(child_)
self.Content.append(obj_)
super(VirtualSystemCollection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class VirtualSystemCollection_Type
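# Hand-written sketch, not generateDS output: nesting Content. A
# VirtualSystemCollection_Type aggregates other Content_Type instances via
# its Content list, so a group of virtual systems serializes as nested
# <Content> elements.
def _example_export_collection(outfile):
    vs = VirtualSystem_Type(id='vm-1', Info=Msg_Type(valueOf_='First VM'))
    group = VirtualSystemCollection_Type(
        id='group-1', Info=Msg_Type(valueOf_='A small appliance'))
    group.add_Content(vs)
    group.export(outfile, 0, name_='VirtualSystemCollection')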
class Strings_Type(GeneratedsSuper):
"""Type for string resource bundleLocale for this string resource
bundleReference to external resource bundle"""
subclass = None
superclass = None
def __init__(self, lang=None, fileRef=None, Msg=None):
self.lang = _cast(None, lang)
self.fileRef = _cast(None, fileRef)
if Msg is None:
self.Msg = []
else:
self.Msg = Msg
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if Strings_Type.subclass:
return Strings_Type.subclass(*args_, **kwargs_)
else:
return Strings_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Msg(self): return self.Msg
def set_Msg(self, Msg): self.Msg = Msg
def add_Msg(self, value): self.Msg.append(value)
def insert_Msg(self, index, value): self.Msg[index] = value
def get_lang(self): return self.lang
def set_lang(self, lang): self.lang = lang
def get_fileRef(self): return self.fileRef
def set_fileRef(self, fileRef): self.fileRef = fileRef
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='Strings_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='Strings_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Strings_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.lang is not None and 'lang' not in already_processed:
already_processed.append('lang')
outfile.write(' lang=%s' % (self.gds_format_string(quote_attrib(self.lang).encode(ExternalEncoding), input_name='lang'), ))
if self.fileRef is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
outfile.write(' fileRef=%s' % (self.gds_format_string(quote_attrib(self.fileRef).encode(ExternalEncoding), input_name='fileRef'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='Strings_Type', fromsubclass_=False):
for Msg_ in self.Msg:
Msg_.export(outfile, level, namespace_, name_='Msg')
def hasContent_(self):
if (
self.Msg
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='Strings_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.lang is not None and 'lang' not in already_processed:
already_processed.append('lang')
showIndent(outfile, level)
outfile.write('lang = "%s",\n' % (self.lang,))
if self.fileRef is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
showIndent(outfile, level)
outfile.write('fileRef = "%s",\n' % (self.fileRef,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('Msg=[\n')
level += 1
for Msg_ in self.Msg:
showIndent(outfile, level)
outfile.write('model_.MsgType(\n')
Msg_.exportLiteral(outfile, level, name_='MsgType')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('lang', node)
if value is not None and 'lang' not in already_processed:
already_processed.append('lang')
self.lang = value
value = find_attr_value_('fileRef', node)
if value is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
self.fileRef = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Msg':
obj_ = MsgType.factory()
obj_.build(child_)
self.Msg.append(obj_)
# end class Strings_Type
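# Hand-written sketch, not generateDS output: a string resource bundle.
# Strings_Type children are MsgType instances (defined elsewhere in this
# module); this assumes MsgType accepts the msgid/valueOf_ keywords implied
# by the exportLiteral code above.
def _example_export_strings_bundle(outfile):
    bundle = Strings_Type(lang='de-DE')
    bundle.add_Msg(MsgType(msgid='info.os', valueOf_='Betriebssystem'))
    bundle.export(outfile, 0, name_='Strings')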
class Section_Type(GeneratedsSuper):
"""Base type for Sections, subclassing this is the most common form of
extensibility. Subtypes define more specific elements."""
subclass = None
superclass = None
def __init__(self, required=None, Info=None, extensiontype_=None):
self.required = _cast(None, required)
self.Info = Info
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if Section_Type.subclass:
return Section_Type.subclass(*args_, **kwargs_)
else:
return Section_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Info(self): return self.Info
def set_Info(self, Info): self.Info = Info
def get_required(self): return self.required
def set_required(self, required): self.required = required
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='Section_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='Section_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Section_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.required is not None and 'required' not in already_processed:
already_processed.append('required')
outfile.write(' required=%s' % (self.gds_format_string(quote_attrib(self.required).encode(ExternalEncoding), input_name='required'), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
def exportChildren(self, outfile, level, namespace_='ovf:', name_='Section_Type', fromsubclass_=False):
if self.Info is not None:
self.Info.export(outfile, level, namespace_, name_='Info', )
def hasContent_(self):
if (
self.Info is not None
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='Section_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.required is not None and 'required' not in already_processed:
already_processed.append('required')
showIndent(outfile, level)
outfile.write('required = "%s",\n' % (self.required,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.Info is not None:
showIndent(outfile, level)
outfile.write('Info=model_.Msg_Type(\n')
self.Info.exportLiteral(outfile, level, name_='Info')
showIndent(outfile, level)
outfile.write('),\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('required', node)
if value is not None and 'required' not in already_processed:
already_processed.append('required')
self.required = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Info':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Info(obj_)
# end class Section_Type
class Msg_Type(GeneratedsSuper):
"""Type for localizable stringDefault string valueIdentifier for lookup
in string resource bundle for alternate locale"""
subclass = None
superclass = None
def __init__(self, msgid='', valueOf_=None):
self.msgid = _cast(None, msgid)
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if Msg_Type.subclass:
return Msg_Type.subclass(*args_, **kwargs_)
else:
return Msg_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_msgid(self): return self.msgid
def set_msgid(self, msgid): self.msgid = msgid
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='Msg_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='Msg_Type')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Msg_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.msgid is not None and 'msgid' not in already_processed:
already_processed.append('msgid')
outfile.write(' msgid=%s' % (self.gds_format_string(quote_attrib(self.msgid).encode(ExternalEncoding), input_name='msgid'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='Msg_Type', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='Msg_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.msgid is not None and 'msgid' not in already_processed:
already_processed.append('msgid')
showIndent(outfile, level)
outfile.write('msgid = "%s",\n' % (self.msgid,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('msgid', node)
if value is not None and 'msgid' not in already_processed:
already_processed.append('msgid')
self.msgid = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class Msg_Type
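# Hand-written sketch, not generateDS output: Msg_Type is the localizable
# leaf used for Info, Name, Annotation and similar elements. The element text
# comes from valueOf_, while msgid keys an alternate-locale lookup in a
# Strings_Type bundle.
def _example_export_msg(outfile):
    msg = Msg_Type(msgid='info.1', valueOf_='A packaged example appliance')
    msg.export(outfile, 0, name_='Info')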
class AnnotationSection_Type(Section_Type):
"""User defined annotation"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, Annotation=None, anytypeobjs_=None):
super(AnnotationSection_Type, self).__init__(required, Info, )
self.Annotation = Annotation
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if AnnotationSection_Type.subclass:
return AnnotationSection_Type.subclass(*args_, **kwargs_)
else:
return AnnotationSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Annotation(self): return self.Annotation
def set_Annotation(self, Annotation): self.Annotation = Annotation
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='AnnotationSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='AnnotationSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='AnnotationSection_Type'):
super(AnnotationSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='AnnotationSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='AnnotationSection_Type', fromsubclass_=False):
super(AnnotationSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
if self.Annotation is not None:
self.Annotation.export(outfile, level, namespace_, name_='Annotation', )
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Annotation is not None or
self.anytypeobjs_ or
super(AnnotationSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='AnnotationSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(AnnotationSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(AnnotationSection_Type, self).exportLiteralChildren(outfile, level, name_)
if self.Annotation is not None:
showIndent(outfile, level)
outfile.write('Annotation=model_.Msg_Type(\n')
self.Annotation.exportLiteral(outfile, level, name_='Annotation')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(AnnotationSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Annotation':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Annotation(obj_)
else:
obj_ = self.gds_build_any(child_, 'AnnotationSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(AnnotationSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class AnnotationSection_Type
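# Hand-written sketch, not generateDS output: building an AnnotationSection.
# Section_Type subclasses always carry an Info message; the Annotation child
# holds the user-visible text.
def _example_export_annotation_section(outfile):
    section = AnnotationSection_Type(
        Info=Msg_Type(valueOf_='Annotation on the appliance'),
        Annotation=Msg_Type(valueOf_='Tested with release 1.0'))
    section.export(outfile, 0, name_='AnnotationSection')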
class ProductSection_Type(Section_Type):
"""Product information for a virtual applianceProperties for
application-level customizationProperty identifier
prefixProperty identifier suffix"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, instance='', classxx='', Product=None, Vendor=None, Version=None, FullVersion=None, ProductUrl=None, VendorUrl=None, AppUrl=None, Icon=None, Category=None, Property=None, anytypeobjs_=None):
super(ProductSection_Type, self).__init__(required, Info, )
self.instance = _cast(None, instance)
self.classxx = _cast(None, classxx)
self.Product = Product
self.Vendor = Vendor
self.Version = Version
self.FullVersion = FullVersion
self.ProductUrl = ProductUrl
self.VendorUrl = VendorUrl
self.AppUrl = AppUrl
if Icon is None:
self.Icon = []
else:
self.Icon = Icon
if Category is None:
self.Category = []
else:
self.Category = Category
if Property is None:
self.Property = []
else:
self.Property = Property
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if ProductSection_Type.subclass:
return ProductSection_Type.subclass(*args_, **kwargs_)
else:
return ProductSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Product(self): return self.Product
def set_Product(self, Product): self.Product = Product
def get_Vendor(self): return self.Vendor
def set_Vendor(self, Vendor): self.Vendor = Vendor
def get_Version(self): return self.Version
def set_Version(self, Version): self.Version = Version
def get_FullVersion(self): return self.FullVersion
def set_FullVersion(self, FullVersion): self.FullVersion = FullVersion
def get_ProductUrl(self): return self.ProductUrl
def set_ProductUrl(self, ProductUrl): self.ProductUrl = ProductUrl
def get_VendorUrl(self): return self.VendorUrl
def set_VendorUrl(self, VendorUrl): self.VendorUrl = VendorUrl
def get_AppUrl(self): return self.AppUrl
def set_AppUrl(self, AppUrl): self.AppUrl = AppUrl
def get_Icon(self): return self.Icon
def set_Icon(self, Icon): self.Icon = Icon
def add_Icon(self, value): self.Icon.append(value)
def insert_Icon(self, index, value): self.Icon[index] = value
def get_Category(self): return self.Category
def set_Category(self, Category): self.Category = Category
def add_Category(self, value): self.Category.append(value)
def insert_Category(self, index, value): self.Category[index] = value
def get_Property(self): return self.Property
def set_Property(self, Property): self.Property = Property
def add_Property(self, value): self.Property.append(value)
def insert_Property(self, index, value): self.Property[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def get_instance(self): return self.instance
def set_instance(self, instance): self.instance = instance
def get_class(self): return self.classxx
def set_class(self, classxx): self.classxx = classxx
def export(self, outfile, level, namespace_='ovf:', name_='ProductSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ProductSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ProductSection_Type'):
super(ProductSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ProductSection_Type')
if self.instance is not None and 'instance' not in already_processed:
already_processed.append('instance')
outfile.write(' instance=%s' % (self.gds_format_string(quote_attrib(self.instance).encode(ExternalEncoding), input_name='instance'), ))
if self.classxx is not None and 'classxx' not in already_processed:
already_processed.append('classxx')
outfile.write(' class=%s' % (self.gds_format_string(quote_attrib(self.classxx).encode(ExternalEncoding), input_name='class'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='ProductSection_Type', fromsubclass_=False):
super(ProductSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
if self.Product is not None:
self.Product.export(outfile, level, namespace_, name_='Product')
if self.Vendor is not None:
self.Vendor.export(outfile, level, namespace_, name_='Vendor')
if self.Version is not None:
self.Version.export(outfile, level, namespace_, name_='Version')
if self.FullVersion is not None:
self.FullVersion.export(outfile, level, namespace_, name_='FullVersion')
if self.ProductUrl is not None:
self.ProductUrl.export(outfile, level, namespace_, name_='ProductUrl')
if self.VendorUrl is not None:
self.VendorUrl.export(outfile, level, namespace_, name_='VendorUrl')
if self.AppUrl is not None:
self.AppUrl.export(outfile, level, namespace_, name_='AppUrl')
for Icon_ in self.Icon:
Icon_.export(outfile, level, namespace_, name_='Icon')
for Category_ in self.Category:
Category_.export(outfile, level, namespace_, name_='Category')
for Property_ in self.Property:
Property_.export(outfile, level, namespace_, name_='Property')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Product is not None or
self.Vendor is not None or
self.Version is not None or
self.FullVersion is not None or
self.ProductUrl is not None or
self.VendorUrl is not None or
self.AppUrl is not None or
self.Icon or
self.Category or
self.Property or
self.anytypeobjs_ or
super(ProductSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='ProductSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.instance is not None and 'instance' not in already_processed:
already_processed.append('instance')
showIndent(outfile, level)
outfile.write('instance = "%s",\n' % (self.instance,))
if self.classxx is not None and 'classxx' not in already_processed:
already_processed.append('classxx')
showIndent(outfile, level)
outfile.write('classxx = "%s",\n' % (self.classxx,))
super(ProductSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(ProductSection_Type, self).exportLiteralChildren(outfile, level, name_)
if self.Product is not None:
showIndent(outfile, level)
outfile.write('Product=model_.Msg_Type(\n')
self.Product.exportLiteral(outfile, level, name_='Product')
showIndent(outfile, level)
outfile.write('),\n')
if self.Vendor is not None:
showIndent(outfile, level)
outfile.write('Vendor=model_.Msg_Type(\n')
self.Vendor.exportLiteral(outfile, level, name_='Vendor')
showIndent(outfile, level)
outfile.write('),\n')
if self.Version is not None:
showIndent(outfile, level)
outfile.write('Version=model_.cimString(\n')
self.Version.exportLiteral(outfile, level, name_='Version')
showIndent(outfile, level)
outfile.write('),\n')
if self.FullVersion is not None:
showIndent(outfile, level)
outfile.write('FullVersion=model_.cimString(\n')
self.FullVersion.exportLiteral(outfile, level, name_='FullVersion')
showIndent(outfile, level)
outfile.write('),\n')
if self.ProductUrl is not None:
showIndent(outfile, level)
outfile.write('ProductUrl=model_.cimString(\n')
self.ProductUrl.exportLiteral(outfile, level, name_='ProductUrl')
showIndent(outfile, level)
outfile.write('),\n')
if self.VendorUrl is not None:
showIndent(outfile, level)
outfile.write('VendorUrl=model_.cimString(\n')
self.VendorUrl.exportLiteral(outfile, level, name_='VendorUrl')
showIndent(outfile, level)
outfile.write('),\n')
if self.AppUrl is not None:
showIndent(outfile, level)
outfile.write('AppUrl=model_.cimString(\n')
self.AppUrl.exportLiteral(outfile, level, name_='AppUrl')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('Icon=[\n')
level += 1
for Icon_ in self.Icon:
showIndent(outfile, level)
outfile.write('model_.IconType(\n')
Icon_.exportLiteral(outfile, level, name_='IconType')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('Category=[\n')
level += 1
for Category_ in self.Category:
showIndent(outfile, level)
outfile.write('model_.Msg_Type(\n')
Category_.exportLiteral(outfile, level, name_='Msg_Type')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('Property=[\n')
level += 1
for Property_ in self.Property:
showIndent(outfile, level)
outfile.write('model_.PropertyType(\n')
Property_.exportLiteral(outfile, level, name_='PropertyType')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('instance', node)
if value is not None and 'instance' not in already_processed:
already_processed.append('instance')
self.instance = value
value = find_attr_value_('class', node)
if value is not None and 'class' not in already_processed:
already_processed.append('class')
self.classxx = value
super(ProductSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Product':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Product(obj_)
elif nodeName_ == 'Vendor':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Vendor(obj_)
elif nodeName_ == 'Version':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Version(obj_)
elif nodeName_ == 'FullVersion':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_FullVersion(obj_)
elif nodeName_ == 'ProductUrl':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ProductUrl(obj_)
elif nodeName_ == 'VendorUrl':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_VendorUrl(obj_)
elif nodeName_ == 'AppUrl':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AppUrl(obj_)
elif nodeName_ == 'Icon':
obj_ = IconType.factory()
obj_.build(child_)
self.Icon.append(obj_)
elif nodeName_ == 'Category':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.Category.append(obj_)
elif nodeName_ == 'Property':
obj_ = PropertyType.factory()
obj_.build(child_)
self.Property.append(obj_)
else:
obj_ = self.gds_build_any(child_, 'ProductSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(ProductSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class ProductSection_Type
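# Hand-written sketch, not generateDS output: minimal product metadata.
# Product and Vendor are Msg_Type children (see buildChildren above); the
# class/instance attributes, which qualify property identifiers, are left at
# their defaults here.
def _example_export_product_section(outfile):
    section = ProductSection_Type(
        Info=Msg_Type(valueOf_='Product information'),
        Product=Msg_Type(valueOf_='ExampleApp'),
        Vendor=Msg_Type(valueOf_='Example Corp'))
    section.export(outfile, 0, name_='ProductSection')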
class PropertyConfigurationValue_Type(GeneratedsSuper):
"""Type for alternative default values for properties when
DeploymentOptionSection is usedAlternative default property
valueConfiguration from DeploymentOptionSection in which this
value is default"""
subclass = None
superclass = None
def __init__(self, configuration=None, value=None, anytypeobjs_=None):
self.configuration = _cast(None, configuration)
self.value = _cast(None, value)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if PropertyConfigurationValue_Type.subclass:
return PropertyConfigurationValue_Type.subclass(*args_, **kwargs_)
else:
return PropertyConfigurationValue_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def get_configuration(self): return self.configuration
def set_configuration(self, configuration): self.configuration = configuration
def get_value(self): return self.value
def set_value(self, value): self.value = value
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='PropertyConfigurationValue_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='PropertyConfigurationValue_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='PropertyConfigurationValue_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.configuration is not None and 'configuration' not in already_processed:
already_processed.append('configuration')
outfile.write(' configuration=%s' % (self.gds_format_string(quote_attrib(self.configuration).encode(ExternalEncoding), input_name='configuration'), ))
if self.value is not None and 'value' not in already_processed:
already_processed.append('value')
outfile.write(' value=%s' % (self.gds_format_string(quote_attrib(self.value).encode(ExternalEncoding), input_name='value'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='PropertyConfigurationValue_Type', fromsubclass_=False):
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.anytypeobjs_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='PropertyConfigurationValue_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.configuration is not None and 'configuration' not in already_processed:
already_processed.append('configuration')
showIndent(outfile, level)
outfile.write('configuration = "%s",\n' % (self.configuration,))
if self.value is not None and 'value' not in already_processed:
already_processed.append('value')
showIndent(outfile, level)
outfile.write('value = "%s",\n' % (self.value,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('configuration', node)
if value is not None and 'configuration' not in already_processed:
already_processed.append('configuration')
self.configuration = value
value = find_attr_value_('value', node)
if value is not None and 'value' not in already_processed:
already_processed.append('value')
self.value = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_ = self.gds_build_any(child_, 'PropertyConfigurationValue_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class PropertyConfigurationValue_Type
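# Hand-written sketch, not generateDS output: a per-configuration default.
# Both fields are plain XML attributes, so the element usually serializes as
# an empty tag unless wildcard children were added.
def _example_export_property_default(outfile):
    alt = PropertyConfigurationValue_Type(configuration='minimal', value='1')
    alt.export(outfile, 2, name_='Value')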
class NetworkSection_Type(Section_Type):
"""Descriptions of logical networks used within the package"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, Network=None, anytypeobjs_=None):
super(NetworkSection_Type, self).__init__(required, Info, )
if Network is None:
self.Network = []
else:
self.Network = Network
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if NetworkSection_Type.subclass:
return NetworkSection_Type.subclass(*args_, **kwargs_)
else:
return NetworkSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Network(self): return self.Network
def set_Network(self, Network): self.Network = Network
def add_Network(self, value): self.Network.append(value)
def insert_Network(self, index, value): self.Network[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='NetworkSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='NetworkSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='NetworkSection_Type'):
super(NetworkSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='NetworkSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='NetworkSection_Type', fromsubclass_=False):
super(NetworkSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for Network_ in self.Network:
Network_.export(outfile, level, namespace_, name_='Network')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Network or
self.anytypeobjs_ or
super(NetworkSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='NetworkSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(NetworkSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(NetworkSection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('Network=[\n')
level += 1
for Network_ in self.Network:
showIndent(outfile, level)
outfile.write('model_.NetworkType(\n')
Network_.exportLiteral(outfile, level, name_='NetworkType')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(NetworkSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Network':
obj_ = NetworkType.factory()
obj_.build(child_)
self.Network.append(obj_)
else:
obj_ = self.gds_build_any(child_, 'NetworkSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(NetworkSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class NetworkSection_Type
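# Hand-written sketch, not generateDS output: a NetworkSection with one
# logical network. Network children are NetworkType instances (defined
# elsewhere in this module); an empty factory instance is used here since
# NetworkType's fields are not shown at this point in the file.
def _example_export_network_section(outfile):
    section = NetworkSection_Type(
        Info=Msg_Type(valueOf_='Logical networks used in the package'))
    section.add_Network(NetworkType.factory())
    section.export(outfile, 0, name_='NetworkSection')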
class DiskSection_Type(Section_Type):
"""Descriptions of virtual disks used within the package"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, Disk=None, anytypeobjs_=None):
super(DiskSection_Type, self).__init__(required, Info, )
if Disk is None:
self.Disk = []
else:
self.Disk = Disk
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if DiskSection_Type.subclass:
return DiskSection_Type.subclass(*args_, **kwargs_)
else:
return DiskSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Disk(self): return self.Disk
def set_Disk(self, Disk): self.Disk = Disk
def add_Disk(self, value): self.Disk.append(value)
def insert_Disk(self, index, value): self.Disk[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='DiskSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='DiskSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='DiskSection_Type'):
super(DiskSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='DiskSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='DiskSection_Type', fromsubclass_=False):
super(DiskSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for Disk_ in self.Disk:
Disk_.export(outfile, level, namespace_, name_='Disk')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Disk or
self.anytypeobjs_ or
super(DiskSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='DiskSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(DiskSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(DiskSection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('Disk=[\n')
level += 1
for Disk_ in self.Disk:
showIndent(outfile, level)
outfile.write('model_.VirtualDiskDesc_Type(\n')
Disk_.exportLiteral(outfile, level, name_='VirtualDiskDesc_Type')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(DiskSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Disk':
obj_ = VirtualDiskDesc_Type.factory()
obj_.build(child_)
self.Disk.append(obj_)
else:
obj_ = self.gds_build_any(child_, 'DiskSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(DiskSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class DiskSection_Type
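# Usage sketch (illustrative values; 'byte * 2^30' is the DSP0004
# spelling for GiB): assemble a one-disk DiskSection_Type in code and
# serialize it with the export method generated above.
def _example_write_disk_section(outfile):
    """Build a DiskSection_Type holding a single disk descriptor and
    write it to outfile at indent level 1."""
    disk = VirtualDiskDesc_Type(
        diskId='vmdisk1',
        capacity='16',
        capacityAllocationUnits='byte * 2^30',
        fileRef='file1')
    section = DiskSection_Type(Disk=[disk])
    section.export(outfile, 1, name_='DiskSection')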
class VirtualDiskDesc_Type(GeneratedsSuper):
"""Type for virtual disk descriptorIdentifier for virtual diskReference
to virtual disk content. If not specified a blank virtual disk
is created of size given by capacity attributeVirtual disk
capacity, can be specified as either an xs:long size or as a
reference to a property using ${property_name}. Unit of
allocation for ovf:capacity. If not specified default value is
bytes. Value shall match a recognized value for the UNITS
qualifier in DSP0004.Format of virtual disk given as a URI that
identifies the disk typeEstimated populated size of disk in
bytesReference to potential parent disk"""
subclass = None
superclass = None
def __init__(self, capacityAllocationUnits='byte', capacity=None, format=None, parentRef=None, fileRef=None, populatedSize=None, diskId=None, anytypeobjs_=None):
self.capacityAllocationUnits = _cast(None, capacityAllocationUnits)
self.capacity = _cast(None, capacity)
self.format = _cast(None, format)
self.parentRef = _cast(None, parentRef)
self.fileRef = _cast(None, fileRef)
self.populatedSize = _cast(int, populatedSize)
self.diskId = _cast(None, diskId)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if VirtualDiskDesc_Type.subclass:
return VirtualDiskDesc_Type.subclass(*args_, **kwargs_)
else:
return VirtualDiskDesc_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def get_capacityAllocationUnits(self): return self.capacityAllocationUnits
def set_capacityAllocationUnits(self, capacityAllocationUnits): self.capacityAllocationUnits = capacityAllocationUnits
def get_capacity(self): return self.capacity
def set_capacity(self, capacity): self.capacity = capacity
def get_format(self): return self.format
def set_format(self, format): self.format = format
def get_parentRef(self): return self.parentRef
def set_parentRef(self, parentRef): self.parentRef = parentRef
def get_fileRef(self): return self.fileRef
def set_fileRef(self, fileRef): self.fileRef = fileRef
def get_populatedSize(self): return self.populatedSize
def set_populatedSize(self, populatedSize): self.populatedSize = populatedSize
def get_diskId(self): return self.diskId
def set_diskId(self, diskId): self.diskId = diskId
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='VirtualDiskDesc_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualDiskDesc_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VirtualDiskDesc_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.capacityAllocationUnits is not None and 'capacityAllocationUnits' not in already_processed:
already_processed.append('capacityAllocationUnits')
outfile.write(' capacityAllocationUnits=%s' % (self.gds_format_string(quote_attrib(self.capacityAllocationUnits).encode(ExternalEncoding), input_name='capacityAllocationUnits'), ))
if self.capacity is not None and 'capacity' not in already_processed:
already_processed.append('capacity')
outfile.write(' capacity=%s' % (self.gds_format_string(quote_attrib(self.capacity).encode(ExternalEncoding), input_name='capacity'), ))
if self.format is not None and 'format' not in already_processed:
already_processed.append('format')
outfile.write(' format=%s' % (self.gds_format_string(quote_attrib(self.format).encode(ExternalEncoding), input_name='format'), ))
if self.parentRef is not None and 'parentRef' not in already_processed:
already_processed.append('parentRef')
outfile.write(' parentRef=%s' % (self.gds_format_string(quote_attrib(self.parentRef).encode(ExternalEncoding), input_name='parentRef'), ))
if self.fileRef is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
outfile.write(' fileRef=%s' % (self.gds_format_string(quote_attrib(self.fileRef).encode(ExternalEncoding), input_name='fileRef'), ))
if self.populatedSize is not None and 'populatedSize' not in already_processed:
already_processed.append('populatedSize')
outfile.write(' populatedSize="%s"' % self.gds_format_integer(self.populatedSize, input_name='populatedSize'))
if self.diskId is not None and 'diskId' not in already_processed:
already_processed.append('diskId')
outfile.write(' diskId=%s' % (self.gds_format_string(quote_attrib(self.diskId).encode(ExternalEncoding), input_name='diskId'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='VirtualDiskDesc_Type', fromsubclass_=False):
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.anytypeobjs_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='VirtualDiskDesc_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.capacityAllocationUnits is not None and 'capacityAllocationUnits' not in already_processed:
already_processed.append('capacityAllocationUnits')
showIndent(outfile, level)
outfile.write('capacityAllocationUnits = "%s",\n' % (self.capacityAllocationUnits,))
if self.capacity is not None and 'capacity' not in already_processed:
already_processed.append('capacity')
showIndent(outfile, level)
outfile.write('capacity = "%s",\n' % (self.capacity,))
if self.format is not None and 'format' not in already_processed:
already_processed.append('format')
showIndent(outfile, level)
outfile.write('format = "%s",\n' % (self.format,))
if self.parentRef is not None and 'parentRef' not in already_processed:
already_processed.append('parentRef')
showIndent(outfile, level)
outfile.write('parentRef = "%s",\n' % (self.parentRef,))
if self.fileRef is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
showIndent(outfile, level)
outfile.write('fileRef = "%s",\n' % (self.fileRef,))
if self.populatedSize is not None and 'populatedSize' not in already_processed:
already_processed.append('populatedSize')
showIndent(outfile, level)
outfile.write('populatedSize = %d,\n' % (self.populatedSize,))
if self.diskId is not None and 'diskId' not in already_processed:
already_processed.append('diskId')
showIndent(outfile, level)
outfile.write('diskId = "%s",\n' % (self.diskId,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('capacityAllocationUnits', node)
if value is not None and 'capacityAllocationUnits' not in already_processed:
already_processed.append('capacityAllocationUnits')
self.capacityAllocationUnits = value
value = find_attr_value_('capacity', node)
if value is not None and 'capacity' not in already_processed:
already_processed.append('capacity')
self.capacity = value
value = find_attr_value_('format', node)
if value is not None and 'format' not in already_processed:
already_processed.append('format')
self.format = value
value = find_attr_value_('parentRef', node)
if value is not None and 'parentRef' not in already_processed:
already_processed.append('parentRef')
self.parentRef = value
value = find_attr_value_('fileRef', node)
if value is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
self.fileRef = value
value = find_attr_value_('populatedSize', node)
if value is not None and 'populatedSize' not in already_processed:
already_processed.append('populatedSize')
try:
self.populatedSize = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
value = find_attr_value_('diskId', node)
if value is not None and 'diskId' not in already_processed:
already_processed.append('diskId')
self.diskId = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_ = self.gds_build_any(child_, 'VirtualDiskDesc_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class VirtualDiskDesc_Type
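# Usage sketch (illustrative): capacity may be a ${property} reference
# and therefore stays a string, while populatedSize is cast to int by
# the constructor above.
def _example_disk_descriptor():
    """Return a minimal VirtualDiskDesc_Type for a property-sized disk."""
    return VirtualDiskDesc_Type(diskId='vmdisk2',
                                capacity='${disk.size}',
                                populatedSize=1048576)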
class OperatingSystemSection_Type(Section_Type):
"""Specification of the operating system installed in the
guestIdentifier defined by the CIM_OperatingSystem.OsType
enumerationVersion defined by the CIM_OperatingSystem.Version
field"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, version=None, id=None, Description=None, anytypeobjs_=None):
super(OperatingSystemSection_Type, self).__init__(required, Info, )
self.version = _cast(None, version)
self.id = _cast(int, id)
self.Description = Description
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if OperatingSystemSection_Type.subclass:
return OperatingSystemSection_Type.subclass(*args_, **kwargs_)
else:
return OperatingSystemSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def get_version(self): return self.version
def set_version(self, version): self.version = version
def get_id(self): return self.id
def set_id(self, id): self.id = id
def export(self, outfile, level, namespace_='ovf:', name_='OperatingSystemSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='OperatingSystemSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='OperatingSystemSection_Type'):
super(OperatingSystemSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='OperatingSystemSection_Type')
if self.version is not None and 'version' not in already_processed:
already_processed.append('version')
outfile.write(' version=%s' % (self.gds_format_string(quote_attrib(self.version).encode(ExternalEncoding), input_name='version'), ))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id="%s"' % self.gds_format_integer(self.id, input_name='id'))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='OperatingSystemSection_Type', fromsubclass_=False):
super(OperatingSystemSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
if self.Description is not None:
self.Description.export(outfile, level, namespace_, name_='Description')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Description is not None or
self.anytypeobjs_ or
super(OperatingSystemSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='OperatingSystemSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.version is not None and 'version' not in already_processed:
already_processed.append('version')
showIndent(outfile, level)
outfile.write('version = "%s",\n' % (self.version,))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = %d,\n' % (self.id,))
super(OperatingSystemSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(OperatingSystemSection_Type, self).exportLiteralChildren(outfile, level, name_)
if self.Description is not None:
showIndent(outfile, level)
outfile.write('Description=model_.Msg_Type(\n')
self.Description.exportLiteral(outfile, level, name_='Description')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('version', node)
if value is not None and 'version' not in already_processed:
already_processed.append('version')
self.version = value
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
try:
self.id = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
super(OperatingSystemSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Description':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Description(obj_)
else:
obj_ = self.gds_build_any(child_, 'OperatingSystemSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(OperatingSystemSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class OperatingSystemSection_Type
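# Usage sketch (assumption: id 36 is "LINUX" in the
# CIM_OperatingSystem.OsType enumeration; Description is omitted here
# because Msg_Type is defined elsewhere in this module).
def _example_os_section():
    """Return an OperatingSystemSection_Type describing a Linux guest."""
    return OperatingSystemSection_Type(id=36, version='2.6')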
class EulaSection_Type(Section_Type):
"""End-User License Agreement"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, License=None, anytypeobjs_=None):
super(EulaSection_Type, self).__init__(required, Info, )
self.License = License
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if EulaSection_Type.subclass:
return EulaSection_Type.subclass(*args_, **kwargs_)
else:
return EulaSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_License(self): return self.License
def set_License(self, License): self.License = License
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='EulaSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='EulaSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='EulaSection_Type'):
super(EulaSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='EulaSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='EulaSection_Type', fromsubclass_=False):
super(EulaSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
if self.License is not None:
self.License.export(outfile, level, namespace_, name_='License', )
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.License is not None or
self.anytypeobjs_ or
super(EulaSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='EulaSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(EulaSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(EulaSection_Type, self).exportLiteralChildren(outfile, level, name_)
if self.License is not None:
showIndent(outfile, level)
outfile.write('License=model_.Msg_Type(\n')
self.License.exportLiteral(outfile, level, name_='License')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(EulaSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'License':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_License(obj_)
else:
obj_ = self.gds_build_any(child_, 'EulaSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(EulaSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class EulaSection_Type
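# Usage sketch (assumption: Msg_Type, defined elsewhere in this module,
# exposes get_valueOf_() for its text content like the other
# simple-content types in this file).
def _example_eula_text(eula_section_node):
    """Parse an <EulaSection> node and return its license text, or None
    when no License child is present."""
    section = EulaSection_Type.factory()
    section.build(eula_section_node)
    license_ = section.get_License()
    if license_ is None:
        return None
    return license_.get_valueOf_()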
class VirtualHardwareSection_Type(Section_Type):
"""Specifies virtual hardware requirements for a virtual machineUnique
identifier of this VirtualHardwareSection (within a
VirtualSystem)"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, id='', transport=None, System=None, Item=None, anytypeobjs_=None):
super(VirtualHardwareSection_Type, self).__init__(required, Info, )
self.id = _cast(None, id)
self.transport = _cast(None, transport)
self.System = System
if Item is None:
self.Item = []
else:
self.Item = Item
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if VirtualHardwareSection_Type.subclass:
return VirtualHardwareSection_Type.subclass(*args_, **kwargs_)
else:
return VirtualHardwareSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_System(self): return self.System
def set_System(self, System): self.System = System
def get_Item(self): return self.Item
def set_Item(self, Item): self.Item = Item
def add_Item(self, value): self.Item.append(value)
def insert_Item(self, index, value): self.Item[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_transport(self): return self.transport
def set_transport(self, transport): self.transport = transport
def export(self, outfile, level, namespace_='ovf:', name_='VirtualHardwareSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualHardwareSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VirtualHardwareSection_Type'):
super(VirtualHardwareSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualHardwareSection_Type')
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
if self.transport is not None and 'transport' not in already_processed:
already_processed.append('transport')
outfile.write(' transport=%s' % (self.gds_format_string(quote_attrib(self.transport).encode(ExternalEncoding), input_name='transport'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='VirtualHardwareSection_Type', fromsubclass_=False):
super(VirtualHardwareSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
if self.System is not None:
self.System.export(outfile, level, namespace_, name_='System')
for Item_ in self.Item:
Item_.export(outfile, level, namespace_, name_='Item')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.System is not None or
self.Item or
self.anytypeobjs_ or
super(VirtualHardwareSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='VirtualHardwareSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = "%s",\n' % (self.id,))
if self.transport is not None and 'transport' not in already_processed:
already_processed.append('transport')
showIndent(outfile, level)
outfile.write('transport = "%s",\n' % (self.transport,))
super(VirtualHardwareSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(VirtualHardwareSection_Type, self).exportLiteralChildren(outfile, level, name_)
if self.System is not None:
showIndent(outfile, level)
outfile.write('System=model_.VSSD_Type(\n')
self.System.exportLiteral(outfile, level, name_='System')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('Item=[\n')
level += 1
for Item_ in self.Item:
showIndent(outfile, level)
outfile.write('model_.RASD_Type(\n')
Item_.exportLiteral(outfile, level, name_='RASD_Type')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
self.id = value
value = find_attr_value_('transport', node)
if value is not None and 'transport' not in already_processed:
already_processed.append('transport')
self.transport = value
super(VirtualHardwareSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'System':
obj_ = VSSD_Type.factory()
obj_.build(child_)
self.set_System(obj_)
elif nodeName_ == 'Item':
obj_ = RASD_Type.factory()
obj_.build(child_)
self.Item.append(obj_)
else:
obj_ = self.gds_build_any(child_, 'VirtualHardwareSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(VirtualHardwareSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class VirtualHardwareSection_Type
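# Usage sketch (illustrative): an RASD_Type item is built for each
# <Item> child by buildChildren above; here we only count them.
def _example_count_hardware_items(vhw_section_node):
    """Parse a <VirtualHardwareSection> node and return its Item count."""
    section = VirtualHardwareSection_Type.factory()
    section.build(vhw_section_node)
    return len(section.get_Item())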
class ResourceAllocationSection_Type(Section_Type):
"""Resource constraints on a VirtualSystemCollection"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, Item=None, anytypeobjs_=None):
super(ResourceAllocationSection_Type, self).__init__(required, Info, )
if Item is None:
self.Item = []
else:
self.Item = Item
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if ResourceAllocationSection_Type.subclass:
return ResourceAllocationSection_Type.subclass(*args_, **kwargs_)
else:
return ResourceAllocationSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Item(self): return self.Item
def set_Item(self, Item): self.Item = Item
def add_Item(self, value): self.Item.append(value)
def insert_Item(self, index, value): self.Item[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='ResourceAllocationSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ResourceAllocationSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ResourceAllocationSection_Type'):
super(ResourceAllocationSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ResourceAllocationSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='ResourceAllocationSection_Type', fromsubclass_=False):
super(ResourceAllocationSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for Item_ in self.Item:
Item_.export(outfile, level, namespace_, name_='Item')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Item or
self.anytypeobjs_ or
super(ResourceAllocationSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='ResourceAllocationSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(ResourceAllocationSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(ResourceAllocationSection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('Item=[\n')
level += 1
for Item_ in self.Item:
showIndent(outfile, level)
outfile.write('model_.RASD_Type(\n')
Item_.exportLiteral(outfile, level, name_='RASD_Type')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(ResourceAllocationSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Item':
obj_ = RASD_Type.factory()
obj_.build(child_)
self.Item.append(obj_)
else:
obj_ = self.gds_build_any(child_, 'ResourceAllocationSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(ResourceAllocationSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class ResourceAllocationSection_Type
class InstallSection_Type(Section_Type):
"""If present indicates that the virtual machine needs to be initially
booted to install and configure the softwareDelay in seconds to
wait for power off to complete after initial boot"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, initialBootStopDelay=0, anytypeobjs_=None):
super(InstallSection_Type, self).__init__(required, Info, )
self.initialBootStopDelay = _cast(int, initialBootStopDelay)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if InstallSection_Type.subclass:
return InstallSection_Type.subclass(*args_, **kwargs_)
else:
return InstallSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def get_initialBootStopDelay(self): return self.initialBootStopDelay
def set_initialBootStopDelay(self, initialBootStopDelay): self.initialBootStopDelay = initialBootStopDelay
def export(self, outfile, level, namespace_='ovf:', name_='InstallSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='InstallSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='InstallSection_Type'):
super(InstallSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='InstallSection_Type')
if self.initialBootStopDelay is not None and 'initialBootStopDelay' not in already_processed:
already_processed.append('initialBootStopDelay')
outfile.write(' initialBootStopDelay="%s"' % self.gds_format_integer(self.initialBootStopDelay, input_name='initialBootStopDelay'))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='InstallSection_Type', fromsubclass_=False):
super(InstallSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.anytypeobjs_ or
super(InstallSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='InstallSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.initialBootStopDelay is not None and 'initialBootStopDelay' not in already_processed:
already_processed.append('initialBootStopDelay')
showIndent(outfile, level)
outfile.write('initialBootStopDelay = %d,\n' % (self.initialBootStopDelay,))
super(InstallSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(InstallSection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('initialBootStopDelay', node)
if value is not None and 'initialBootStopDelay' not in already_processed:
already_processed.append('initialBootStopDelay')
try:
self.initialBootStopDelay = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
super(InstallSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_ = self.gds_build_any(child_, 'InstallSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(InstallSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class InstallSection_Type
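# Usage sketch (the 300-second delay is an illustrative value, not a
# default mandated by the OVF specification).
def _example_install_section():
    """Return an InstallSection_Type requesting a 5-minute stop delay."""
    return InstallSection_Type(initialBootStopDelay=300)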
class StartupSection_Type(Section_Type):
"""Specifies the order in which entities in a VirtualSystemCollection
are powered on and shut down"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, Item=None, anytypeobjs_=None):
super(StartupSection_Type, self).__init__(required, Info, )
if Item is None:
self.Item = []
else:
self.Item = Item
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if StartupSection_Type.subclass:
return StartupSection_Type.subclass(*args_, **kwargs_)
else:
return StartupSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Item(self): return self.Item
def set_Item(self, Item): self.Item = Item
def add_Item(self, value): self.Item.append(value)
def insert_Item(self, index, value): self.Item[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='StartupSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='StartupSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='StartupSection_Type'):
super(StartupSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='StartupSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='StartupSection_Type', fromsubclass_=False):
super(StartupSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for Item_ in self.Item:
Item_.export(outfile, level, namespace_, name_='Item')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Item or
self.anytypeobjs_ or
super(StartupSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='StartupSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(StartupSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(StartupSection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('Item=[\n')
level += 1
for Item_ in self.Item:
showIndent(outfile, level)
outfile.write('model_.ItemType(\n')
Item_.exportLiteral(outfile, level, name_='ItemType')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(StartupSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Item':
obj_ = ItemType.factory()
obj_.build(child_)
self.Item.append(obj_)
else:
obj_ = self.gds_build_any(child_, 'StartupSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(StartupSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class StartupSection_Type
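# Usage sketch (illustrative): exportLiteral renders the object tree as
# Python-literal text, which can be handy for generating test fixtures.
def _example_dump_startup_literal(startup_section_node, outfile):
    """Parse a <StartupSection> node and dump it as a Python literal."""
    section = StartupSection_Type.factory()
    section.build(startup_section_node)
    section.exportLiteral(outfile, 0, name_='StartupSection_Type')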
class DeploymentOptionSection_Type(Section_Type):
"""Enumeration of discrete deployment options"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, Configuration=None, anytypeobjs_=None):
super(DeploymentOptionSection_Type, self).__init__(required, Info, )
if Configuration is None:
self.Configuration = []
else:
self.Configuration = Configuration
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if DeploymentOptionSection_Type.subclass:
return DeploymentOptionSection_Type.subclass(*args_, **kwargs_)
else:
return DeploymentOptionSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Configuration(self): return self.Configuration
def set_Configuration(self, Configuration): self.Configuration = Configuration
def add_Configuration(self, value): self.Configuration.append(value)
def insert_Configuration(self, index, value): self.Configuration[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='DeploymentOptionSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='DeploymentOptionSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='DeploymentOptionSection_Type'):
super(DeploymentOptionSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='DeploymentOptionSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='DeploymentOptionSection_Type', fromsubclass_=False):
super(DeploymentOptionSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for Configuration_ in self.Configuration:
Configuration_.export(outfile, level, namespace_, name_='Configuration')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Configuration or
self.anytypeobjs_ or
super(DeploymentOptionSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='DeploymentOptionSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(DeploymentOptionSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(DeploymentOptionSection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('Configuration=[\n')
level += 1
for Configuration_ in self.Configuration:
showIndent(outfile, level)
outfile.write('model_.ConfigurationType(\n')
Configuration_.exportLiteral(outfile, level, name_='ConfigurationType')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(DeploymentOptionSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Configuration':
obj_ = ConfigurationType.factory()
obj_.build(child_)
self.Configuration.append(obj_)
else:
obj_ = self.gds_build_any(child_, 'DeploymentOptionSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(DeploymentOptionSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class DeploymentOptionSection_Type
class cimDateTime(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, CIM_DateTime=None, Interval=None, Date=None, Time=None, Datetime=None):
self.CIM_DateTime = CIM_DateTime
self.Interval = Interval
self.Date = Date
self.Time = Time
self.Datetime = Datetime
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimDateTime.subclass:
return cimDateTime.subclass(*args_, **kwargs_)
else:
return cimDateTime(*args_, **kwargs_)
factory = staticmethod(factory)
def get_CIM_DateTime(self): return self.CIM_DateTime
def set_CIM_DateTime(self, CIM_DateTime): self.CIM_DateTime = CIM_DateTime
def get_Interval(self): return self.Interval
def set_Interval(self, Interval): self.Interval = Interval
def get_Date(self): return self.Date
def set_Date(self, Date): self.Date = Date
def get_Time(self): return self.Time
def set_Time(self, Time): self.Time = Time
def get_Datetime(self): return self.Datetime
def set_Datetime(self, Datetime): self.Datetime = Datetime
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimDateTime', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimDateTime')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimDateTime'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimDateTime', fromsubclass_=False):
if self.CIM_DateTime is not None:
showIndent(outfile, level)
outfile.write('<%sCIM_DateTime>%s</%sCIM_DateTime>\n' % (namespace_, self.gds_format_string(quote_xml(self.CIM_DateTime).encode(ExternalEncoding), input_name='CIM_DateTime'), namespace_))
if self.Interval is not None:
showIndent(outfile, level)
outfile.write('<%sInterval>%s</%sInterval>\n' % (namespace_, self.gds_format_string(quote_xml(self.Interval).encode(ExternalEncoding), input_name='Interval'), namespace_))
if self.Date is not None:
showIndent(outfile, level)
outfile.write('<%sDate>%s</%sDate>\n' % (namespace_, self.gds_format_string(quote_xml(self.Date).encode(ExternalEncoding), input_name='Date'), namespace_))
if self.Time is not None:
showIndent(outfile, level)
outfile.write('<%sTime>%s</%sTime>\n' % (namespace_, self.gds_format_string(quote_xml(self.Time).encode(ExternalEncoding), input_name='Time'), namespace_))
if self.Datetime is not None:
showIndent(outfile, level)
outfile.write('<%sDatetime>%s</%sDatetime>\n' % (namespace_, self.gds_format_string(quote_xml(self.Datetime).encode(ExternalEncoding), input_name='Datetime'), namespace_))
def hasContent_(self):
if (
self.CIM_DateTime is not None or
self.Interval is not None or
self.Date is not None or
self.Time is not None or
self.Datetime is not None
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimDateTime'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.CIM_DateTime is not None:
showIndent(outfile, level)
outfile.write('CIM_DateTime=%s,\n' % quote_python(self.CIM_DateTime).encode(ExternalEncoding))
if self.Interval is not None:
showIndent(outfile, level)
outfile.write('Interval=%s,\n' % quote_python(self.Interval).encode(ExternalEncoding))
if self.Date is not None:
showIndent(outfile, level)
outfile.write('Date=%s,\n' % quote_python(self.Date).encode(ExternalEncoding))
if self.Time is not None:
showIndent(outfile, level)
outfile.write('Time=%s,\n' % quote_python(self.Time).encode(ExternalEncoding))
if self.Datetime is not None:
showIndent(outfile, level)
outfile.write('Datetime=%s,\n' % quote_python(self.Datetime).encode(ExternalEncoding))
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'CIM_DateTime':
CIM_DateTime_ = child_.text
CIM_DateTime_ = self.gds_validate_string(CIM_DateTime_, node, 'CIM_DateTime')
self.CIM_DateTime = CIM_DateTime_
elif nodeName_ == 'Interval':
Interval_ = child_.text
Interval_ = self.gds_validate_string(Interval_, node, 'Interval')
self.Interval = Interval_
elif nodeName_ == 'Date':
Date_ = child_.text
Date_ = self.gds_validate_string(Date_, node, 'Date')
self.Date = Date_
elif nodeName_ == 'Time':
Time_ = child_.text
Time_ = self.gds_validate_string(Time_, node, 'Time')
self.Time = Time_
elif nodeName_ == 'Datetime':
Datetime_ = child_.text
Datetime_ = self.gds_validate_string(Datetime_, node, 'Datetime')
self.Datetime = Datetime_
# end class cimDateTime
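# Usage sketch (the timestamp literal is illustrative and follows the
# DSP0004 yyyymmddHHMMSS.mmmmmm+UTC-offset layout).
def _example_cim_timestamp():
    """Return a cimDateTime carrying a fixed CIM_DateTime literal."""
    return cimDateTime(CIM_DateTime='20000101000000.000000+000')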
class cimUnsignedByte(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimUnsignedByte.subclass:
return cimUnsignedByte.subclass(*args_, **kwargs_)
else:
return cimUnsignedByte(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimUnsignedByte', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimUnsignedByte')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimUnsignedByte'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimUnsignedByte', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimUnsignedByte'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimUnsignedByte
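# Usage sketch (illustrative): these simple-content wrappers keep the
# element text verbatim in valueOf_; no integer coercion is applied.
def _example_unsigned_byte():
    """Return a cimUnsignedByte whose text content is '255'."""
    return cimUnsignedByte(valueOf_='255')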
class cimByte(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimByte.subclass:
return cimByte.subclass(*args_, **kwargs_)
else:
return cimByte(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimByte', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimByte')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimByte'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimByte', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimByte'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimByte
class cimUnsignedShort(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimUnsignedShort.subclass:
return cimUnsignedShort.subclass(*args_, **kwargs_)
else:
return cimUnsignedShort(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimUnsignedShort', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimUnsignedShort')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimUnsignedShort'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimUnsignedShort', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimUnsignedShort'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimUnsignedShort
class cimShort(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimShort.subclass:
return cimShort.subclass(*args_, **kwargs_)
else:
return cimShort(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimShort', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimShort')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimShort'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimShort', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimShort'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimShort
class cimUnsignedInt(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None, extensiontype_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if cimUnsignedInt.subclass:
return cimUnsignedInt.subclass(*args_, **kwargs_)
else:
return cimUnsignedInt(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='cimUnsignedInt', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimUnsignedInt')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimUnsignedInt'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimUnsignedInt', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimUnsignedInt'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimUnsignedInt
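# cimUnsignedInt (above) differs from the plainer scalar types in carrying an
# extensiontype_ slot (as do cimLong, cimString and cimBoolean below): when it
# is set, exportAttributes emits an xsi:type attribute so the element can be
# round-tripped as a derived type, and buildAttributes reads it back via
# find_attr_value_('xsi:type', node). A sketch of the emitted markup, with a
# hypothetical 'cim:someType' type value:
#
#   obj = cimUnsignedInt.factory(valueOf_='42', extensiontype_='cim:someType')
#   obj.export(out, 0)
#   # -> <ovf:cimUnsignedInt
#   #        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
#   #        xsi:type="cim:someType">42</ovf:cimUnsignedInt>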
class cimInt(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimInt.subclass:
return cimInt.subclass(*args_, **kwargs_)
else:
return cimInt(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimInt', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimInt')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimInt'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimInt', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimInt'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimInt
class cimUnsignedLong(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimUnsignedLong.subclass:
return cimUnsignedLong.subclass(*args_, **kwargs_)
else:
return cimUnsignedLong(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimUnsignedLong', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimUnsignedLong')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimUnsignedLong'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimUnsignedLong', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimUnsignedLong'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimUnsignedLong
class cimLong(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None, extensiontype_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if cimLong.subclass:
return cimLong.subclass(*args_, **kwargs_)
else:
return cimLong(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='cimLong', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimLong')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimLong'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimLong', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimLong'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimLong
class cimString(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None, extensiontype_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if cimString.subclass:
return cimString.subclass(*args_, **kwargs_)
else:
return cimString(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='cimString', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimString')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimString'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimString', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimString'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimString
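# cimString doubles as a base class: cimChar16 and qualifierString (defined
# further down) both extend it, reusing its build/export machinery through
# super() calls and layering their own attributes on top.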
class cimBoolean(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None, extensiontype_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if cimBoolean.subclass:
return cimBoolean.subclass(*args_, **kwargs_)
else:
return cimBoolean(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='cimBoolean', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimBoolean')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimBoolean'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimBoolean', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimBoolean'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimBoolean
class cimFloat(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimFloat.subclass:
return cimFloat.subclass(*args_, **kwargs_)
else:
return cimFloat(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimFloat', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimFloat')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimFloat'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimFloat', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimFloat'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimFloat
class cimDouble(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimDouble.subclass:
return cimDouble.subclass(*args_, **kwargs_)
else:
return cimDouble(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimDouble', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimDouble')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimDouble'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimDouble', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimDouble'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimDouble
class cimChar16(cimString):
subclass = None
superclass = cimString
def __init__(self, valueOf_=None):
super(cimChar16, self).__init__(valueOf_, )
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimChar16.subclass:
return cimChar16.subclass(*args_, **kwargs_)
else:
return cimChar16(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimChar16', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimChar16')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimChar16'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
super(cimChar16, self).exportAttributes(outfile, level, already_processed, namespace_, name_='cimChar16')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimChar16', fromsubclass_=False):
super(cimChar16, self).exportChildren(outfile, level, namespace_, name_, True)
pass
def hasContent_(self):
if (
self.valueOf_ or
super(cimChar16, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimChar16'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
super(cimChar16, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(cimChar16, self).exportLiteralChildren(outfile, level, name_)
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
super(cimChar16, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimChar16
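# cimChar16 (above) shows the generated subclassing pattern: each overridden
# method handles the subclass's own wildcard attributes first and then
# delegates to the cimString implementation via super(), and hasContent_()
# ORs the local valueOf_ check with the superclass check.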
class cimBase64Binary(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimBase64Binary.subclass:
return cimBase64Binary.subclass(*args_, **kwargs_)
else:
return cimBase64Binary(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimBase64Binary', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimBase64Binary')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimBase64Binary'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimBase64Binary', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimBase64Binary'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimBase64Binary
class cimReference(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, anytypeobjs_=None):
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimReference.subclass:
return cimReference.subclass(*args_, **kwargs_)
else:
return cimReference(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimReference', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimReference')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimReference'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimReference', fromsubclass_=False):
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.anytypeobjs_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimReference'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_ = self.gds_build_any(child_, 'cimReference')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class cimReference
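# Unlike the scalar wrappers, cimReference keeps arbitrary child elements in
# the anytypeobjs_ list (filled by gds_build_any() during buildChildren)
# instead of a single valueOf_ text value. A sketch of programmatic
# construction (any child exposing the generated export(outfile, level,
# namespace_) signature will do):
#
#   ref = cimReference.factory()
#   ref.add_anytypeobjs_(cimString(valueOf_='some-id'))
#   ref.export(out, 0)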
class cimHexBinary(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimHexBinary.subclass:
return cimHexBinary.subclass(*args_, **kwargs_)
else:
return cimHexBinary(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimHexBinary', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimHexBinary')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimHexBinary'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimHexBinary', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimHexBinary'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimHexBinary
class cimAnySimpleType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimAnySimpleType.subclass:
return cimAnySimpleType.subclass(*args_, **kwargs_)
else:
return cimAnySimpleType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimAnySimpleType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimAnySimpleType')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimAnySimpleType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimAnySimpleType', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='cimAnySimpleType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class cimAnySimpleType
class qualifierString(cimString):
subclass = None
superclass = cimString
def __init__(self, qualifier=None, valueOf_=None, extensiontype_=None):
super(qualifierString, self).__init__(valueOf_, extensiontype_, )
self.qualifier = _cast(None, qualifier)
self.valueOf_ = valueOf_
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if qualifierString.subclass:
return qualifierString.subclass(*args_, **kwargs_)
else:
return qualifierString(*args_, **kwargs_)
factory = staticmethod(factory)
def get_qualifier(self): return self.qualifier
def set_qualifier(self, qualifier): self.qualifier = qualifier
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='qualifierString', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierString')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierString'):
super(qualifierString, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierString')
if self.qualifier is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
outfile.write(' qualifier=%s' % (self.gds_format_string(quote_attrib(self.qualifier).encode(ExternalEncoding), input_name='qualifier'), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierString', fromsubclass_=False):
super(qualifierString, self).exportChildren(outfile, level, namespace_, name_, True)
pass
def hasContent_(self):
if (
self.valueOf_ or
super(qualifierString, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='qualifierString'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.qualifier is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
showIndent(outfile, level)
outfile.write('qualifier = "%s",\n' % (self.qualifier,))
super(qualifierString, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(qualifierString, self).exportLiteralChildren(outfile, level, name_)
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('qualifier', node)
if value is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
self.qualifier = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
super(qualifierString, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class qualifierString
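# Note: each qualifier* class below extends a cim* base with one optional
# 'qualifier' XML attribute. Illustrative sketch (attribute and text values
# are made up; assumes a writable byte stream; not part of the generated API):
def _example_export_qualifierString(outfile):
    q = qualifierString.factory(qualifier='Override', valueOf_='some text')
    q.export(outfile, 0)  # emits <ovf:qualifierString qualifier="Override">some text</ovf:qualifierString>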
class qualifierBoolean(cimBoolean):
subclass = None
superclass = cimBoolean
def __init__(self, qualifier=None, valueOf_=None):
super(qualifierBoolean, self).__init__(valueOf_, )
self.qualifier = _cast(None, qualifier)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if qualifierBoolean.subclass:
return qualifierBoolean.subclass(*args_, **kwargs_)
else:
return qualifierBoolean(*args_, **kwargs_)
factory = staticmethod(factory)
def get_qualifier(self): return self.qualifier
def set_qualifier(self, qualifier): self.qualifier = qualifier
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def export(self, outfile, level, namespace_='ovf:', name_='qualifierBoolean', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierBoolean')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierBoolean'):
super(qualifierBoolean, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierBoolean')
if self.qualifier is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
outfile.write(' qualifier=%s' % (self.gds_format_string(quote_attrib(self.qualifier).encode(ExternalEncoding), input_name='qualifier'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierBoolean', fromsubclass_=False):
super(qualifierBoolean, self).exportChildren(outfile, level, namespace_, name_, True)
pass
def hasContent_(self):
if (
self.valueOf_ or
super(qualifierBoolean, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='qualifierBoolean'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.qualifier is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
showIndent(outfile, level)
outfile.write('qualifier = "%s",\n' % (self.qualifier,))
super(qualifierBoolean, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(qualifierBoolean, self).exportLiteralChildren(outfile, level, name_)
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('qualifier', node)
if value is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
self.qualifier = value
super(qualifierBoolean, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class qualifierBoolean
class qualifierUInt32(cimUnsignedInt):
subclass = None
superclass = cimUnsignedInt
def __init__(self, qualifier=None, valueOf_=None):
super(qualifierUInt32, self).__init__(valueOf_, )
self.qualifier = _cast(None, qualifier)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if qualifierUInt32.subclass:
return qualifierUInt32.subclass(*args_, **kwargs_)
else:
return qualifierUInt32(*args_, **kwargs_)
factory = staticmethod(factory)
def get_qualifier(self): return self.qualifier
def set_qualifier(self, qualifier): self.qualifier = qualifier
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def export(self, outfile, level, namespace_='ovf:', name_='qualifierUInt32', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierUInt32')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierUInt32'):
super(qualifierUInt32, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierUInt32')
if self.qualifier is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
outfile.write(' qualifier=%s' % (self.gds_format_string(quote_attrib(self.qualifier).encode(ExternalEncoding), input_name='qualifier'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierUInt32', fromsubclass_=False):
super(qualifierUInt32, self).exportChildren(outfile, level, namespace_, name_, True)
pass
def hasContent_(self):
if (
self.valueOf_ or
super(qualifierUInt32, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='qualifierUInt32'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.qualifier is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
showIndent(outfile, level)
outfile.write('qualifier = "%s",\n' % (self.qualifier,))
super(qualifierUInt32, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(qualifierUInt32, self).exportLiteralChildren(outfile, level, name_)
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('qualifier', node)
if value is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
self.qualifier = value
super(qualifierUInt32, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class qualifierUInt32
class qualifierSInt64(cimLong):
subclass = None
superclass = cimLong
def __init__(self, qualifier=None, valueOf_=None):
super(qualifierSInt64, self).__init__(valueOf_, )
self.qualifier = _cast(None, qualifier)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if qualifierSInt64.subclass:
return qualifierSInt64.subclass(*args_, **kwargs_)
else:
return qualifierSInt64(*args_, **kwargs_)
factory = staticmethod(factory)
def get_qualifier(self): return self.qualifier
def set_qualifier(self, qualifier): self.qualifier = qualifier
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def export(self, outfile, level, namespace_='ovf:', name_='qualifierSInt64', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierSInt64')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierSInt64'):
super(qualifierSInt64, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierSInt64')
if self.qualifier is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
outfile.write(' qualifier=%s' % (self.gds_format_string(quote_attrib(self.qualifier).encode(ExternalEncoding), input_name='qualifier'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierSInt64', fromsubclass_=False):
super(qualifierSInt64, self).exportChildren(outfile, level, namespace_, name_, True)
pass
def hasContent_(self):
if (
self.valueOf_ or
super(qualifierSInt64, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='qualifierSInt64'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.qualifier is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
showIndent(outfile, level)
outfile.write('qualifier = "%s",\n' % (self.qualifier,))
super(qualifierSInt64, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(qualifierSInt64, self).exportLiteralChildren(outfile, level, name_)
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('qualifier', node)
if value is not None and 'qualifier' not in already_processed:
already_processed.append('qualifier')
self.qualifier = value
super(qualifierSInt64, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class qualifierSInt64
class qualifierSArray(qualifierString):
subclass = None
superclass = qualifierString
def __init__(self, qualifier=None):
super(qualifierSArray, self).__init__(qualifier, )
pass
def factory(*args_, **kwargs_):
if qualifierSArray.subclass:
return qualifierSArray.subclass(*args_, **kwargs_)
else:
return qualifierSArray(*args_, **kwargs_)
factory = staticmethod(factory)
def export(self, outfile, level, namespace_='ovf:', name_='qualifierSArray', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierSArray')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierSArray'):
super(qualifierSArray, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierSArray')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierSArray', fromsubclass_=False):
super(qualifierSArray, self).exportChildren(outfile, level, namespace_, name_, True)
pass
def hasContent_(self):
if (
super(qualifierSArray, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='qualifierSArray'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(qualifierSArray, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(qualifierSArray, self).exportLiteralChildren(outfile, level, name_)
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(qualifierSArray, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(qualifierSArray, self).buildChildren(child_, node, nodeName_, True)
pass
# end class qualifierSArray
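# Note: unlike its qualifierString base, qualifierSArray.export() never writes
# the text value: even when valueOf_ is set, it opens the element, delegates
# to the (empty) exportChildren(), and closes it, so only attributes survive.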
class Caption(cimString):
subclass = None
superclass = cimString
def __init__(self, valueOf_=None):
super(Caption, self).__init__(valueOf_, )
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if Caption.subclass:
return Caption.subclass(*args_, **kwargs_)
else:
return Caption(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='Caption', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='Caption')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Caption'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
super(Caption, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Caption')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='Caption', fromsubclass_=False):
super(Caption, self).exportChildren(outfile, level, namespace_, name_, True)
pass
def hasContent_(self):
if (
self.valueOf_ or
super(Caption, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='Caption'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
super(Caption, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(Caption, self).exportLiteralChildren(outfile, level, name_)
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
super(Caption, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class Caption
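# Illustrative sketch (not part of the generated bindings): Caption merges its
# own wildcard attributes with the inherited cimString behaviour on export.
def _example_caption(outfile):
    c = Caption.factory(valueOf_='Virtual Hardware')
    c.set_anyAttributes_({'note': 'free-form'})  # written through verbatim
    c.export(outfile, 0)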
class CIM_VirtualSystemSettingData_Type(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, AutomaticRecoveryAction=None, AutomaticShutdownAction=None, AutomaticStartupAction=None, AutomaticStartupActionDelay=None, AutomaticStartupActionSequenceNumber=None, Caption=None, ConfigurationDataRoot=None, ConfigurationFile=None, ConfigurationID=None, CreationTime=None, Description=None, ElementName=None, InstanceID=None, LogDataRoot=None, Notes=None, RecoveryFile=None, SnapshotDataRoot=None, SuspendDataRoot=None, SwapFileDataRoot=None, VirtualSystemIdentifier=None, VirtualSystemType=None, anytypeobjs_=None, extensiontype_=None):
self.AutomaticRecoveryAction = AutomaticRecoveryAction
self.AutomaticShutdownAction = AutomaticShutdownAction
self.AutomaticStartupAction = AutomaticStartupAction
self.AutomaticStartupActionDelay = AutomaticStartupActionDelay
self.AutomaticStartupActionSequenceNumber = AutomaticStartupActionSequenceNumber
self.Caption = Caption
self.ConfigurationDataRoot = ConfigurationDataRoot
self.ConfigurationFile = ConfigurationFile
self.ConfigurationID = ConfigurationID
self.CreationTime = CreationTime
self.Description = Description
self.ElementName = ElementName
self.InstanceID = InstanceID
self.LogDataRoot = LogDataRoot
if Notes is None:
self.Notes = []
else:
self.Notes = Notes
self.RecoveryFile = RecoveryFile
self.SnapshotDataRoot = SnapshotDataRoot
self.SuspendDataRoot = SuspendDataRoot
self.SwapFileDataRoot = SwapFileDataRoot
self.VirtualSystemIdentifier = VirtualSystemIdentifier
self.VirtualSystemType = VirtualSystemType
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if CIM_VirtualSystemSettingData_Type.subclass:
return CIM_VirtualSystemSettingData_Type.subclass(*args_, **kwargs_)
else:
return CIM_VirtualSystemSettingData_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_AutomaticRecoveryAction(self): return self.AutomaticRecoveryAction
def set_AutomaticRecoveryAction(self, AutomaticRecoveryAction): self.AutomaticRecoveryAction = AutomaticRecoveryAction
def validate_AutomaticRecoveryAction(self, value):
# Validate type AutomaticRecoveryAction, a restriction on xs:unsignedShort.
pass
def get_AutomaticShutdownAction(self): return self.AutomaticShutdownAction
def set_AutomaticShutdownAction(self, AutomaticShutdownAction): self.AutomaticShutdownAction = AutomaticShutdownAction
def validate_AutomaticShutdownAction(self, value):
# Validate type AutomaticShutdownAction, a restriction on xs:unsignedShort.
pass
def get_AutomaticStartupAction(self): return self.AutomaticStartupAction
def set_AutomaticStartupAction(self, AutomaticStartupAction): self.AutomaticStartupAction = AutomaticStartupAction
def validate_AutomaticStartupAction(self, value):
# Validate type AutomaticStartupAction, a restriction on xs:unsignedShort.
pass
def get_AutomaticStartupActionDelay(self): return self.AutomaticStartupActionDelay
def set_AutomaticStartupActionDelay(self, AutomaticStartupActionDelay): self.AutomaticStartupActionDelay = AutomaticStartupActionDelay
def get_AutomaticStartupActionSequenceNumber(self): return self.AutomaticStartupActionSequenceNumber
def set_AutomaticStartupActionSequenceNumber(self, AutomaticStartupActionSequenceNumber): self.AutomaticStartupActionSequenceNumber = AutomaticStartupActionSequenceNumber
def get_Caption(self): return self.Caption
def set_Caption(self, Caption): self.Caption = Caption
def get_ConfigurationDataRoot(self): return self.ConfigurationDataRoot
def set_ConfigurationDataRoot(self, ConfigurationDataRoot): self.ConfigurationDataRoot = ConfigurationDataRoot
def get_ConfigurationFile(self): return self.ConfigurationFile
def set_ConfigurationFile(self, ConfigurationFile): self.ConfigurationFile = ConfigurationFile
def get_ConfigurationID(self): return self.ConfigurationID
def set_ConfigurationID(self, ConfigurationID): self.ConfigurationID = ConfigurationID
def get_CreationTime(self): return self.CreationTime
def set_CreationTime(self, CreationTime): self.CreationTime = CreationTime
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def get_ElementName(self): return self.ElementName
def set_ElementName(self, ElementName): self.ElementName = ElementName
def get_InstanceID(self): return self.InstanceID
def set_InstanceID(self, InstanceID): self.InstanceID = InstanceID
def get_LogDataRoot(self): return self.LogDataRoot
def set_LogDataRoot(self, LogDataRoot): self.LogDataRoot = LogDataRoot
def get_Notes(self): return self.Notes
def set_Notes(self, Notes): self.Notes = Notes
def add_Notes(self, value): self.Notes.append(value)
def insert_Notes(self, index, value): self.Notes[index] = value
def get_RecoveryFile(self): return self.RecoveryFile
def set_RecoveryFile(self, RecoveryFile): self.RecoveryFile = RecoveryFile
def get_SnapshotDataRoot(self): return self.SnapshotDataRoot
def set_SnapshotDataRoot(self, SnapshotDataRoot): self.SnapshotDataRoot = SnapshotDataRoot
def get_SuspendDataRoot(self): return self.SuspendDataRoot
def set_SuspendDataRoot(self, SuspendDataRoot): self.SuspendDataRoot = SuspendDataRoot
def get_SwapFileDataRoot(self): return self.SwapFileDataRoot
def set_SwapFileDataRoot(self, SwapFileDataRoot): self.SwapFileDataRoot = SwapFileDataRoot
def get_VirtualSystemIdentifier(self): return self.VirtualSystemIdentifier
def set_VirtualSystemIdentifier(self, VirtualSystemIdentifier): self.VirtualSystemIdentifier = VirtualSystemIdentifier
def get_VirtualSystemType(self): return self.VirtualSystemType
def set_VirtualSystemType(self, VirtualSystemType): self.VirtualSystemType = VirtualSystemType
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='CIM_VirtualSystemSettingData_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='CIM_VirtualSystemSettingData_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='CIM_VirtualSystemSettingData_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='CIM_VirtualSystemSettingData_Type', fromsubclass_=False):
if self.AutomaticRecoveryAction is not None:
showIndent(outfile, level)
outfile.write('<%sAutomaticRecoveryAction>%s</%sAutomaticRecoveryAction>\n' % (namespace_, self.gds_format_integer(self.AutomaticRecoveryAction, input_name='AutomaticRecoveryAction'), namespace_))
if self.AutomaticShutdownAction is not None:
showIndent(outfile, level)
outfile.write('<%sAutomaticShutdownAction>%s</%sAutomaticShutdownAction>\n' % (namespace_, self.gds_format_integer(self.AutomaticShutdownAction, input_name='AutomaticShutdownAction'), namespace_))
if self.AutomaticStartupAction is not None:
showIndent(outfile, level)
outfile.write('<%sAutomaticStartupAction>%s</%sAutomaticStartupAction>\n' % (namespace_, self.gds_format_integer(self.AutomaticStartupAction, input_name='AutomaticStartupAction'), namespace_))
if self.AutomaticStartupActionDelay is not None:
self.AutomaticStartupActionDelay.export(outfile, level, namespace_, name_='AutomaticStartupActionDelay')
if self.AutomaticStartupActionSequenceNumber is not None:
self.AutomaticStartupActionSequenceNumber.export(outfile, level, namespace_, name_='AutomaticStartupActionSequenceNumber')
if self.Caption is not None:
self.Caption.export(outfile, level, namespace_, name_='Caption')
if self.ConfigurationDataRoot is not None:
self.ConfigurationDataRoot.export(outfile, level, namespace_, name_='ConfigurationDataRoot')
if self.ConfigurationFile is not None:
self.ConfigurationFile.export(outfile, level, namespace_, name_='ConfigurationFile')
if self.ConfigurationID is not None:
self.ConfigurationID.export(outfile, level, namespace_, name_='ConfigurationID')
if self.CreationTime is not None:
self.CreationTime.export(outfile, level, namespace_, name_='CreationTime')
if self.Description is not None:
self.Description.export(outfile, level, namespace_, name_='Description')
if self.ElementName is not None:
self.ElementName.export(outfile, level, namespace_, name_='ElementName', )
if self.InstanceID is not None:
self.InstanceID.export(outfile, level, namespace_, name_='InstanceID', )
if self.LogDataRoot is not None:
self.LogDataRoot.export(outfile, level, namespace_, name_='LogDataRoot')
for Notes_ in self.Notes:
Notes_.export(outfile, level, namespace_, name_='Notes')
if self.RecoveryFile is not None:
self.RecoveryFile.export(outfile, level, namespace_, name_='RecoveryFile')
if self.SnapshotDataRoot is not None:
self.SnapshotDataRoot.export(outfile, level, namespace_, name_='SnapshotDataRoot')
if self.SuspendDataRoot is not None:
self.SuspendDataRoot.export(outfile, level, namespace_, name_='SuspendDataRoot')
if self.SwapFileDataRoot is not None:
self.SwapFileDataRoot.export(outfile, level, namespace_, name_='SwapFileDataRoot')
if self.VirtualSystemIdentifier is not None:
self.VirtualSystemIdentifier.export(outfile, level, namespace_, name_='VirtualSystemIdentifier')
if self.VirtualSystemType is not None:
self.VirtualSystemType.export(outfile, level, namespace_, name_='VirtualSystemType')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.AutomaticRecoveryAction is not None or
self.AutomaticShutdownAction is not None or
self.AutomaticStartupAction is not None or
self.AutomaticStartupActionDelay is not None or
self.AutomaticStartupActionSequenceNumber is not None or
self.Caption is not None or
self.ConfigurationDataRoot is not None or
self.ConfigurationFile is not None or
self.ConfigurationID is not None or
self.CreationTime is not None or
self.Description is not None or
self.ElementName is not None or
self.InstanceID is not None or
self.LogDataRoot is not None or
self.Notes or
self.RecoveryFile is not None or
self.SnapshotDataRoot is not None or
self.SuspendDataRoot is not None or
self.SwapFileDataRoot is not None or
self.VirtualSystemIdentifier is not None or
self.VirtualSystemType is not None or
self.anytypeobjs_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='CIM_VirtualSystemSettingData_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.AutomaticRecoveryAction is not None:
showIndent(outfile, level)
outfile.write('AutomaticRecoveryAction=%d,\n' % self.AutomaticRecoveryAction)
if self.AutomaticShutdownAction is not None:
showIndent(outfile, level)
outfile.write('AutomaticShutdownAction=%d,\n' % self.AutomaticShutdownAction)
if self.AutomaticStartupAction is not None:
showIndent(outfile, level)
outfile.write('AutomaticStartupAction=%d,\n' % self.AutomaticStartupAction)
if self.AutomaticStartupActionDelay is not None:
showIndent(outfile, level)
outfile.write('AutomaticStartupActionDelay=model_.AutomaticStartupActionDelay(\n')
self.AutomaticStartupActionDelay.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.AutomaticStartupActionSequenceNumber is not None:
showIndent(outfile, level)
outfile.write('AutomaticStartupActionSequenceNumber=model_.AutomaticStartupActionSequenceNumber(\n')
self.AutomaticStartupActionSequenceNumber.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.Caption is not None:
showIndent(outfile, level)
outfile.write('Caption=model_.Caption(\n')
self.Caption.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.ConfigurationDataRoot is not None:
showIndent(outfile, level)
outfile.write('ConfigurationDataRoot=model_.ConfigurationDataRoot(\n')
self.ConfigurationDataRoot.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.ConfigurationFile is not None:
showIndent(outfile, level)
outfile.write('ConfigurationFile=model_.ConfigurationFile(\n')
self.ConfigurationFile.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.ConfigurationID is not None:
showIndent(outfile, level)
outfile.write('ConfigurationID=model_.ConfigurationID(\n')
self.ConfigurationID.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.CreationTime is not None:
showIndent(outfile, level)
outfile.write('CreationTime=model_.CreationTime(\n')
self.CreationTime.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.Description is not None:
showIndent(outfile, level)
outfile.write('Description=model_.Description(\n')
self.Description.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.ElementName is not None:
showIndent(outfile, level)
outfile.write('ElementName=model_.ElementName(\n')
self.ElementName.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.InstanceID is not None:
showIndent(outfile, level)
outfile.write('InstanceID=model_.InstanceID(\n')
self.InstanceID.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.LogDataRoot is not None:
showIndent(outfile, level)
outfile.write('LogDataRoot=model_.LogDataRoot(\n')
self.LogDataRoot.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('Notes=[\n')
level += 1
for Notes_ in self.Notes:
showIndent(outfile, level)
outfile.write('model_.Notes(\n')
Notes_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
if self.RecoveryFile is not None:
showIndent(outfile, level)
outfile.write('RecoveryFile=model_.RecoveryFile(\n')
self.RecoveryFile.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.SnapshotDataRoot is not None:
showIndent(outfile, level)
outfile.write('SnapshotDataRoot=model_.SnapshotDataRoot(\n')
self.SnapshotDataRoot.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.SuspendDataRoot is not None:
showIndent(outfile, level)
outfile.write('SuspendDataRoot=model_.SuspendDataRoot(\n')
self.SuspendDataRoot.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.SwapFileDataRoot is not None:
showIndent(outfile, level)
outfile.write('SwapFileDataRoot=model_.SwapFileDataRoot(\n')
self.SwapFileDataRoot.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.VirtualSystemIdentifier is not None:
showIndent(outfile, level)
outfile.write('VirtualSystemIdentifier=model_.VirtualSystemIdentifier(\n')
self.VirtualSystemIdentifier.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.VirtualSystemType is not None:
showIndent(outfile, level)
outfile.write('VirtualSystemType=model_.VirtualSystemType(\n')
self.VirtualSystemType.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'AutomaticRecoveryAction':
sval_ = child_.text
try:
ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'AutomaticRecoveryAction')
self.AutomaticRecoveryAction = ival_
self.validate_AutomaticRecoveryAction(self.AutomaticRecoveryAction) # validate type AutomaticRecoveryAction
elif nodeName_ == 'AutomaticShutdownAction':
sval_ = child_.text
try:
ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'AutomaticShutdownAction')
self.AutomaticShutdownAction = ival_
self.validate_AutomaticShutdownAction(self.AutomaticShutdownAction) # validate type AutomaticShutdownAction
elif nodeName_ == 'AutomaticStartupAction':
sval_ = child_.text
try:
ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'AutomaticStartupAction')
self.AutomaticStartupAction = ival_
self.validate_AutomaticStartupAction(self.AutomaticStartupAction) # validate type AutomaticStartupAction
elif nodeName_ == 'AutomaticStartupActionDelay':
obj_ = cimDateTime.factory()
obj_.build(child_)
self.set_AutomaticStartupActionDelay(obj_)
elif nodeName_ == 'AutomaticStartupActionSequenceNumber':
obj_ = cimUnsignedShort.factory()
obj_.build(child_)
self.set_AutomaticStartupActionSequenceNumber(obj_)
elif nodeName_ == 'Caption':
obj_ = Caption.factory()
obj_.build(child_)
self.set_Caption(obj_)
elif nodeName_ == 'ConfigurationDataRoot':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ConfigurationDataRoot(obj_)
elif nodeName_ == 'ConfigurationFile':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ConfigurationFile(obj_)
elif nodeName_ == 'ConfigurationID':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ConfigurationID(obj_)
elif nodeName_ == 'CreationTime':
obj_ = cimDateTime.factory()
obj_.build(child_)
self.set_CreationTime(obj_)
elif nodeName_ == 'Description':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Description(obj_)
elif nodeName_ == 'ElementName':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ElementName(obj_)
elif nodeName_ == 'InstanceID':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_InstanceID(obj_)
elif nodeName_ == 'LogDataRoot':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_LogDataRoot(obj_)
elif nodeName_ == 'Notes':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.Notes.append(obj_)
elif nodeName_ == 'RecoveryFile':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_RecoveryFile(obj_)
elif nodeName_ == 'SnapshotDataRoot':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_SnapshotDataRoot(obj_)
elif nodeName_ == 'SuspendDataRoot':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_SuspendDataRoot(obj_)
elif nodeName_ == 'SwapFileDataRoot':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_SwapFileDataRoot(obj_)
elif nodeName_ == 'VirtualSystemIdentifier':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_VirtualSystemIdentifier(obj_)
elif nodeName_ == 'VirtualSystemType':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_VirtualSystemType(obj_)
else:
obj_ = self.gds_build_any(child_, 'CIM_VirtualSystemSettingData_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class CIM_VirtualSystemSettingData_Type
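# Illustrative sketch (assumes `node` is a parsed VirtualSystemSettingData
# element; the helper is not part of the generated API):
def _example_read_vssd(node):
    vssd = CIM_VirtualSystemSettingData_Type.factory()
    vssd.build(node)  # buildChildren() dispatches on each child's tag name
    # Plain integers (e.g. AutomaticRecoveryAction) are stored directly;
    # most other children arrive wrapped in cim* objects such as cimString.
    return vssd.get_ElementName(), vssd.get_Notes()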
class CIM_ResourceAllocationSettingData_Type(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Address=None, AddressOnParent=None, AllocationUnits=None, AutomaticAllocation=None, AutomaticDeallocation=None, Caption=None, Connection=None, ConsumerVisibility=None, Description=None, ElementName=None, HostResource=None, InstanceID=None, Limit=None, MappingBehavior=None, OtherResourceType=None, Parent=None, PoolID=None, Reservation=None, ResourceSubType=None, ResourceType=None, VirtualQuantity=None, VirtualQuantityUnits=None, Weight=None, anytypeobjs_=None, extensiontype_=None):
self.Address = Address
self.AddressOnParent = AddressOnParent
self.AllocationUnits = AllocationUnits
self.AutomaticAllocation = AutomaticAllocation
self.AutomaticDeallocation = AutomaticDeallocation
self.Caption = Caption
if Connection is None:
self.Connection = []
else:
self.Connection = Connection
self.ConsumerVisibility = ConsumerVisibility
self.Description = Description
self.ElementName = ElementName
if HostResource is None:
self.HostResource = []
else:
self.HostResource = HostResource
self.InstanceID = InstanceID
self.Limit = Limit
self.MappingBehavior = MappingBehavior
self.OtherResourceType = OtherResourceType
self.Parent = Parent
self.PoolID = PoolID
self.Reservation = Reservation
self.ResourceSubType = ResourceSubType
self.ResourceType = ResourceType
self.VirtualQuantity = VirtualQuantity
self.VirtualQuantityUnits = VirtualQuantityUnits
self.Weight = Weight
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if CIM_ResourceAllocationSettingData_Type.subclass:
return CIM_ResourceAllocationSettingData_Type.subclass(*args_, **kwargs_)
else:
return CIM_ResourceAllocationSettingData_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Address(self): return self.Address
def set_Address(self, Address): self.Address = Address
def get_AddressOnParent(self): return self.AddressOnParent
def set_AddressOnParent(self, AddressOnParent): self.AddressOnParent = AddressOnParent
def get_AllocationUnits(self): return self.AllocationUnits
def set_AllocationUnits(self, AllocationUnits): self.AllocationUnits = AllocationUnits
def get_AutomaticAllocation(self): return self.AutomaticAllocation
def set_AutomaticAllocation(self, AutomaticAllocation): self.AutomaticAllocation = AutomaticAllocation
def get_AutomaticDeallocation(self): return self.AutomaticDeallocation
def set_AutomaticDeallocation(self, AutomaticDeallocation): self.AutomaticDeallocation = AutomaticDeallocation
def get_Caption(self): return self.Caption
def set_Caption(self, Caption): self.Caption = Caption
def get_Connection(self): return self.Connection
def set_Connection(self, Connection): self.Connection = Connection
def add_Connection(self, value): self.Connection.append(value)
def insert_Connection(self, index, value): self.Connection[index] = value
def get_ConsumerVisibility(self): return self.ConsumerVisibility
def set_ConsumerVisibility(self, ConsumerVisibility): self.ConsumerVisibility = ConsumerVisibility
def validate_ConsumerVisibility(self, value):
# Validate type ConsumerVisibility, a restriction on xs:unsignedShort.
pass
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def get_ElementName(self): return self.ElementName
def set_ElementName(self, ElementName): self.ElementName = ElementName
def get_HostResource(self): return self.HostResource
def set_HostResource(self, HostResource): self.HostResource = HostResource
def add_HostResource(self, value): self.HostResource.append(value)
def insert_HostResource(self, index, value): self.HostResource[index] = value
def get_InstanceID(self): return self.InstanceID
def set_InstanceID(self, InstanceID): self.InstanceID = InstanceID
def get_Limit(self): return self.Limit
def set_Limit(self, Limit): self.Limit = Limit
def get_MappingBehavior(self): return self.MappingBehavior
def set_MappingBehavior(self, MappingBehavior): self.MappingBehavior = MappingBehavior
def validate_MappingBehavior(self, value):
# Validate type MappingBehavior, a restriction on xs:unsignedShort.
pass
def get_OtherResourceType(self): return self.OtherResourceType
def set_OtherResourceType(self, OtherResourceType): self.OtherResourceType = OtherResourceType
def get_Parent(self): return self.Parent
def set_Parent(self, Parent): self.Parent = Parent
def get_PoolID(self): return self.PoolID
def set_PoolID(self, PoolID): self.PoolID = PoolID
def get_Reservation(self): return self.Reservation
def set_Reservation(self, Reservation): self.Reservation = Reservation
def get_ResourceSubType(self): return self.ResourceSubType
def set_ResourceSubType(self, ResourceSubType): self.ResourceSubType = ResourceSubType
def get_ResourceType(self): return self.ResourceType
def set_ResourceType(self, ResourceType): self.ResourceType = ResourceType
def validate_ResourceType(self, value):
# Validate type ResourceType, a restriction on xs:unsignedShort.
pass
def get_VirtualQuantity(self): return self.VirtualQuantity
def set_VirtualQuantity(self, VirtualQuantity): self.VirtualQuantity = VirtualQuantity
def get_VirtualQuantityUnits(self): return self.VirtualQuantityUnits
def set_VirtualQuantityUnits(self, VirtualQuantityUnits): self.VirtualQuantityUnits = VirtualQuantityUnits
def get_Weight(self): return self.Weight
def set_Weight(self, Weight): self.Weight = Weight
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='CIM_ResourceAllocationSettingData_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='CIM_ResourceAllocationSettingData_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='CIM_ResourceAllocationSettingData_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
pass
def exportChildren(self, outfile, level, namespace_='ovf:', name_='CIM_ResourceAllocationSettingData_Type', fromsubclass_=False):
if self.Address is not None:
self.Address.export(outfile, level, namespace_, name_='Address')
if self.AddressOnParent is not None:
self.AddressOnParent.export(outfile, level, namespace_, name_='AddressOnParent')
if self.AllocationUnits is not None:
self.AllocationUnits.export(outfile, level, namespace_, name_='AllocationUnits')
if self.AutomaticAllocation is not None:
self.AutomaticAllocation.export(outfile, level, namespace_, name_='AutomaticAllocation')
if self.AutomaticDeallocation is not None:
self.AutomaticDeallocation.export(outfile, level, namespace_, name_='AutomaticDeallocation')
if self.Caption is not None:
self.Caption.export(outfile, level, namespace_, name_='Caption')
for Connection_ in self.Connection:
Connection_.export(outfile, level, namespace_, name_='Connection')
if self.ConsumerVisibility is not None:
showIndent(outfile, level)
outfile.write('<%sConsumerVisibility>%s</%sConsumerVisibility>\n' % (namespace_, self.gds_format_integer(self.ConsumerVisibility, input_name='ConsumerVisibility'), namespace_))
if self.Description is not None:
self.Description.export(outfile, level, namespace_, name_='Description')
if self.ElementName is not None:
self.ElementName.export(outfile, level, namespace_, name_='ElementName', )
for HostResource_ in self.HostResource:
HostResource_.export(outfile, level, namespace_, name_='HostResource')
if self.InstanceID is not None:
self.InstanceID.export(outfile, level, namespace_, name_='InstanceID', )
if self.Limit is not None:
self.Limit.export(outfile, level, namespace_, name_='Limit')
if self.MappingBehavior is not None:
showIndent(outfile, level)
outfile.write('<%sMappingBehavior>%s</%sMappingBehavior>\n' % (namespace_, self.gds_format_integer(self.MappingBehavior, input_name='MappingBehavior'), namespace_))
if self.OtherResourceType is not None:
self.OtherResourceType.export(outfile, level, namespace_, name_='OtherResourceType')
if self.Parent is not None:
self.Parent.export(outfile, level, namespace_, name_='Parent')
if self.PoolID is not None:
self.PoolID.export(outfile, level, namespace_, name_='PoolID')
if self.Reservation is not None:
self.Reservation.export(outfile, level, namespace_, name_='Reservation')
if self.ResourceSubType is not None:
self.ResourceSubType.export(outfile, level, namespace_, name_='ResourceSubType')
if self.ResourceType is not None:
showIndent(outfile, level)
outfile.write('<%sResourceType>%s</%sResourceType>\n' % (namespace_, self.gds_format_integer(self.ResourceType, input_name='ResourceType'), namespace_))
if self.VirtualQuantity is not None:
self.VirtualQuantity.export(outfile, level, namespace_, name_='VirtualQuantity')
if self.VirtualQuantityUnits is not None:
self.VirtualQuantityUnits.export(outfile, level, namespace_, name_='VirtualQuantityUnits')
if self.Weight is not None:
self.Weight.export(outfile, level, namespace_, name_='Weight')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Address is not None or
self.AddressOnParent is not None or
self.AllocationUnits is not None or
self.AutomaticAllocation is not None or
self.AutomaticDeallocation is not None or
self.Caption is not None or
self.Connection or
self.ConsumerVisibility is not None or
self.Description is not None or
self.ElementName is not None or
self.HostResource or
self.InstanceID is not None or
self.Limit is not None or
self.MappingBehavior is not None or
self.OtherResourceType is not None or
self.Parent is not None or
self.PoolID is not None or
self.Reservation is not None or
self.ResourceSubType is not None or
self.ResourceType is not None or
self.VirtualQuantity is not None or
self.VirtualQuantityUnits is not None or
self.Weight is not None or
self.anytypeobjs_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='CIM_ResourceAllocationSettingData_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.Address is not None:
showIndent(outfile, level)
outfile.write('Address=model_.Address(\n')
self.Address.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.AddressOnParent is not None:
showIndent(outfile, level)
outfile.write('AddressOnParent=model_.AddressOnParent(\n')
self.AddressOnParent.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.AllocationUnits is not None:
showIndent(outfile, level)
outfile.write('AllocationUnits=model_.AllocationUnits(\n')
self.AllocationUnits.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.AutomaticAllocation is not None:
showIndent(outfile, level)
outfile.write('AutomaticAllocation=model_.AutomaticAllocation(\n')
self.AutomaticAllocation.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.AutomaticDeallocation is not None:
showIndent(outfile, level)
outfile.write('AutomaticDeallocation=model_.AutomaticDeallocation(\n')
self.AutomaticDeallocation.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.Caption is not None:
showIndent(outfile, level)
outfile.write('Caption=model_.Caption(\n')
self.Caption.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('Connection=[\n')
level += 1
for Connection_ in self.Connection:
showIndent(outfile, level)
outfile.write('model_.Connection(\n')
Connection_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
if self.ConsumerVisibility is not None:
showIndent(outfile, level)
outfile.write('ConsumerVisibility=%d,\n' % self.ConsumerVisibility)
if self.Description is not None:
showIndent(outfile, level)
outfile.write('Description=model_.Description(\n')
self.Description.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.ElementName is not None:
showIndent(outfile, level)
outfile.write('ElementName=model_.ElementName(\n')
self.ElementName.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('HostResource=[\n')
level += 1
for HostResource_ in self.HostResource:
showIndent(outfile, level)
outfile.write('model_.HostResource(\n')
HostResource_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
if self.InstanceID is not None:
showIndent(outfile, level)
outfile.write('InstanceID=model_.InstanceID(\n')
self.InstanceID.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.Limit is not None:
showIndent(outfile, level)
outfile.write('Limit=model_.Limit(\n')
self.Limit.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.MappingBehavior is not None:
showIndent(outfile, level)
outfile.write('MappingBehavior=%d,\n' % self.MappingBehavior)
if self.OtherResourceType is not None:
showIndent(outfile, level)
outfile.write('OtherResourceType=model_.OtherResourceType(\n')
self.OtherResourceType.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.Parent is not None:
showIndent(outfile, level)
outfile.write('Parent=model_.Parent(\n')
self.Parent.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.PoolID is not None:
showIndent(outfile, level)
outfile.write('PoolID=model_.PoolID(\n')
self.PoolID.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.Reservation is not None:
showIndent(outfile, level)
outfile.write('Reservation=model_.Reservation(\n')
self.Reservation.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.ResourceSubType is not None:
showIndent(outfile, level)
outfile.write('ResourceSubType=model_.ResourceSubType(\n')
self.ResourceSubType.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.ResourceType is not None:
showIndent(outfile, level)
outfile.write('ResourceType=%d,\n' % self.ResourceType)
if self.VirtualQuantity is not None:
showIndent(outfile, level)
outfile.write('VirtualQuantity=model_.VirtualQuantity(\n')
self.VirtualQuantity.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.VirtualQuantityUnits is not None:
showIndent(outfile, level)
outfile.write('VirtualQuantityUnits=model_.VirtualQuantityUnits(\n')
self.VirtualQuantityUnits.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
if self.Weight is not None:
showIndent(outfile, level)
outfile.write('Weight=model_.Weight(\n')
self.Weight.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Address':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Address(obj_)
elif nodeName_ == 'AddressOnParent':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AddressOnParent(obj_)
elif nodeName_ == 'AllocationUnits':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AllocationUnits(obj_)
elif nodeName_ == 'AutomaticAllocation':
class_obj_ = self.get_class_obj_(child_, cimBoolean)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AutomaticAllocation(obj_)
elif nodeName_ == 'AutomaticDeallocation':
class_obj_ = self.get_class_obj_(child_, cimBoolean)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AutomaticDeallocation(obj_)
elif nodeName_ == 'Caption':
obj_ = Caption.factory()
obj_.build(child_)
self.set_Caption(obj_)
elif nodeName_ == 'Connection':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.Connection.append(obj_)
elif nodeName_ == 'ConsumerVisibility':
sval_ = child_.text
try:
ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'ConsumerVisibility')
self.ConsumerVisibility = ival_
self.validate_ConsumerVisibility(self.ConsumerVisibility) # validate type ConsumerVisibility
elif nodeName_ == 'Description':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Description(obj_)
elif nodeName_ == 'ElementName':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ElementName(obj_)
elif nodeName_ == 'HostResource':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.HostResource.append(obj_)
elif nodeName_ == 'InstanceID':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_InstanceID(obj_)
elif nodeName_ == 'Limit':
obj_ = cimUnsignedLong.factory()
obj_.build(child_)
self.set_Limit(obj_)
elif nodeName_ == 'MappingBehavior':
sval_ = child_.text
try:
ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'MappingBehavior')
self.MappingBehavior = ival_
self.validate_MappingBehavior(self.MappingBehavior) # validate type MappingBehavior
elif nodeName_ == 'OtherResourceType':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_OtherResourceType(obj_)
elif nodeName_ == 'Parent':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Parent(obj_)
elif nodeName_ == 'PoolID':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_PoolID(obj_)
elif nodeName_ == 'Reservation':
obj_ = cimUnsignedLong.factory()
obj_.build(child_)
self.set_Reservation(obj_)
elif nodeName_ == 'ResourceSubType':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ResourceSubType(obj_)
elif nodeName_ == 'ResourceType':
sval_ = child_.text
try:
ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'ResourceType')
self.ResourceType = ival_
self.validate_ResourceType(self.ResourceType) # validate type ResourceType
elif nodeName_ == 'VirtualQuantity':
obj_ = cimUnsignedLong.factory()
obj_.build(child_)
self.set_VirtualQuantity(obj_)
elif nodeName_ == 'VirtualQuantityUnits':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_VirtualQuantityUnits(obj_)
elif nodeName_ == 'Weight':
class_obj_ = self.get_class_obj_(child_, cimUnsignedInt)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Weight(obj_)
else:
obj_ = self.gds_build_any(child_, 'CIM_ResourceAllocationSettingData_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class CIM_ResourceAllocationSettingData_Type
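# A minimal round-trip sketch (comment only, never executed at import time).
# It assumes the usual generateDS preamble above (sys import, ElementTree
# parsing helpers); the file name 'rasd.xml' is hypothetical.
#
#     import sys
#     from xml.etree import ElementTree as etree_
#     node = etree_.parse('rasd.xml').getroot()
#     rasd = CIM_ResourceAllocationSettingData_Type.factory()
#     rasd.build(node)
#     rasd.export(sys.stdout, 0, name_='Item')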
class MsgType(GeneratedsSuper):
"""String element valueString element identifier"""
subclass = None
superclass = None
def __init__(self, msgid=None, valueOf_=None):
self.msgid = _cast(None, msgid)
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if MsgType.subclass:
return MsgType.subclass(*args_, **kwargs_)
else:
return MsgType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_msgid(self): return self.msgid
def set_msgid(self, msgid): self.msgid = msgid
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='MsgType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='MsgType')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='MsgType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.msgid is not None and 'msgid' not in already_processed:
already_processed.append('msgid')
outfile.write(' msgid=%s' % (self.gds_format_string(quote_attrib(self.msgid).encode(ExternalEncoding), input_name='msgid'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='MsgType', fromsubclass_=False):
pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='MsgType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.msgid is not None and 'msgid' not in already_processed:
already_processed.append('msgid')
showIndent(outfile, level)
outfile.write('msgid = "%s",\n' % (self.msgid,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('msgid', node)
if value is not None and 'msgid' not in already_processed:
already_processed.append('msgid')
self.msgid = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class MsgType
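# Usage sketch (comment only): MsgType is mixed content, so export() writes
# the text value inline between the tags rather than as indented children.
# Assumes the module-level sys import from the generateDS preamble.
#
#     msg = MsgType(msgid='info.1', valueOf_='Virtual appliance')
#     msg.export(sys.stdout, 0, name_='Info')
#     # -> <ovf:Info msgid="info.1">Virtual appliance</ovf:Info>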
class IconType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, mimeType=None, width=None, fileRef=None, height=None):
self.mimeType = _cast(None, mimeType)
self.width = _cast(int, width)
self.fileRef = _cast(None, fileRef)
self.height = _cast(int, height)
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if IconType.subclass:
return IconType.subclass(*args_, **kwargs_)
else:
return IconType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_mimeType(self): return self.mimeType
def set_mimeType(self, mimeType): self.mimeType = mimeType
def get_width(self): return self.width
def set_width(self, width): self.width = width
def get_fileRef(self): return self.fileRef
def set_fileRef(self, fileRef): self.fileRef = fileRef
def get_height(self): return self.height
def set_height(self, height): self.height = height
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='IconType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='IconType')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='IconType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.mimeType is not None and 'mimeType' not in already_processed:
already_processed.append('mimeType')
outfile.write(' mimeType=%s' % (self.gds_format_string(quote_attrib(self.mimeType).encode(ExternalEncoding), input_name='mimeType'), ))
if self.width is not None and 'width' not in already_processed:
already_processed.append('width')
outfile.write(' width="%s"' % self.gds_format_integer(self.width, input_name='width'))
if self.fileRef is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
outfile.write(' fileRef=%s' % (self.gds_format_string(quote_attrib(self.fileRef).encode(ExternalEncoding), input_name='fileRef'), ))
if self.height is not None and 'height' not in already_processed:
already_processed.append('height')
outfile.write(' height="%s"' % self.gds_format_integer(self.height, input_name='height'))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='IconType', fromsubclass_=False):
pass
    def hasContent_(self):
        # IconType carries only attributes; the generated empty-tuple test
        # was always false, so it never has element content.
        return False
def exportLiteral(self, outfile, level, name_='IconType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.mimeType is not None and 'mimeType' not in already_processed:
already_processed.append('mimeType')
showIndent(outfile, level)
outfile.write('mimeType = "%s",\n' % (self.mimeType,))
if self.width is not None and 'width' not in already_processed:
already_processed.append('width')
showIndent(outfile, level)
outfile.write('width = %d,\n' % (self.width,))
if self.fileRef is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
showIndent(outfile, level)
outfile.write('fileRef = "%s",\n' % (self.fileRef,))
if self.height is not None and 'height' not in already_processed:
already_processed.append('height')
showIndent(outfile, level)
outfile.write('height = %d,\n' % (self.height,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('mimeType', node)
if value is not None and 'mimeType' not in already_processed:
already_processed.append('mimeType')
self.mimeType = value
value = find_attr_value_('width', node)
if value is not None and 'width' not in already_processed:
already_processed.append('width')
try:
self.width = int(value)
            except ValueError as exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
value = find_attr_value_('fileRef', node)
if value is not None and 'fileRef' not in already_processed:
already_processed.append('fileRef')
self.fileRef = value
value = find_attr_value_('height', node)
if value is not None and 'height' not in already_processed:
already_processed.append('height')
try:
self.height = int(value)
            except ValueError as exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class IconType
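# Usage sketch (comment only): IconType carries everything in attributes, so
# hasContent_() is False and export() always emits a self-closing tag.
#
#     icon = IconType(mimeType='image/png', width=32, height=32,
#                     fileRef='icon.png')
#     icon.export(sys.stdout, 1, name_='Icon')
#     # -> <ovf:Icon mimeType="image/png" width="32" fileRef="icon.png" height="32"/>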
class PropertyType(GeneratedsSuper):
"""Property identifierProperty typeA comma-separated set of type
qualifiersDetermines whether the property value is configurable
during installationDefault value for propertyDetermines whether
the property value should be obscured during deployment"""
subclass = None
superclass = None
def __init__(self, userConfigurable=False, value='', key=None, password=False, type_=None, qualifiers=None, Label=None, Description=None, Value=None):
self.userConfigurable = _cast(bool, userConfigurable)
self.value = _cast(None, value)
self.key = _cast(None, key)
self.password = _cast(bool, password)
self.type_ = _cast(None, type_)
self.qualifiers = _cast(None, qualifiers)
self.Label = Label
self.Description = Description
if Value is None:
self.Value = []
else:
self.Value = Value
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if PropertyType.subclass:
return PropertyType.subclass(*args_, **kwargs_)
else:
return PropertyType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Label(self): return self.Label
def set_Label(self, Label): self.Label = Label
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def get_Value(self): return self.Value
def set_Value(self, Value): self.Value = Value
def add_Value(self, value): self.Value.append(value)
def insert_Value(self, index, value): self.Value[index] = value
def get_userConfigurable(self): return self.userConfigurable
def set_userConfigurable(self, userConfigurable): self.userConfigurable = userConfigurable
def get_value(self): return self.value
def set_value(self, value): self.value = value
def get_key(self): return self.key
def set_key(self, key): self.key = key
def get_password(self): return self.password
def set_password(self, password): self.password = password
def get_type(self): return self.type_
def set_type(self, type_): self.type_ = type_
def get_qualifiers(self): return self.qualifiers
def set_qualifiers(self, qualifiers): self.qualifiers = qualifiers
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='PropertyType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='PropertyType')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='PropertyType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.userConfigurable is not None and 'userConfigurable' not in already_processed:
already_processed.append('userConfigurable')
outfile.write(' userConfigurable="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.userConfigurable)), input_name='userConfigurable'))
if self.value is not None and 'value' not in already_processed:
already_processed.append('value')
outfile.write(' value=%s' % (self.gds_format_string(quote_attrib(self.value).encode(ExternalEncoding), input_name='value'), ))
if self.key is not None and 'key' not in already_processed:
already_processed.append('key')
outfile.write(' key=%s' % (self.gds_format_string(quote_attrib(self.key).encode(ExternalEncoding), input_name='key'), ))
if self.password is not None and 'password' not in already_processed:
already_processed.append('password')
outfile.write(' password="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.password)), input_name='password'))
if self.type_ is not None and 'type_' not in already_processed:
already_processed.append('type_')
outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
if self.qualifiers is not None and 'qualifiers' not in already_processed:
already_processed.append('qualifiers')
outfile.write(' qualifiers=%s' % (self.gds_format_string(quote_attrib(self.qualifiers).encode(ExternalEncoding), input_name='qualifiers'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='PropertyType', fromsubclass_=False):
if self.Label is not None:
self.Label.export(outfile, level, namespace_, name_='Label')
if self.Description is not None:
self.Description.export(outfile, level, namespace_, name_='Description')
for Value_ in self.Value:
Value_.export(outfile, level, namespace_, name_='Value')
def hasContent_(self):
if (
self.Label is not None or
self.Description is not None or
self.Value
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='PropertyType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.userConfigurable is not None and 'userConfigurable' not in already_processed:
already_processed.append('userConfigurable')
showIndent(outfile, level)
outfile.write('userConfigurable = %s,\n' % (self.userConfigurable,))
if self.value is not None and 'value' not in already_processed:
already_processed.append('value')
showIndent(outfile, level)
outfile.write('value = "%s",\n' % (self.value,))
if self.key is not None and 'key' not in already_processed:
already_processed.append('key')
showIndent(outfile, level)
outfile.write('key = "%s",\n' % (self.key,))
if self.password is not None and 'password' not in already_processed:
already_processed.append('password')
showIndent(outfile, level)
outfile.write('password = %s,\n' % (self.password,))
if self.type_ is not None and 'type_' not in already_processed:
already_processed.append('type_')
showIndent(outfile, level)
outfile.write('type_ = "%s",\n' % (self.type_,))
if self.qualifiers is not None and 'qualifiers' not in already_processed:
already_processed.append('qualifiers')
showIndent(outfile, level)
outfile.write('qualifiers = "%s",\n' % (self.qualifiers,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.Label is not None:
showIndent(outfile, level)
outfile.write('Label=model_.Msg_Type(\n')
self.Label.exportLiteral(outfile, level, name_='Label')
showIndent(outfile, level)
outfile.write('),\n')
if self.Description is not None:
showIndent(outfile, level)
outfile.write('Description=model_.Msg_Type(\n')
self.Description.exportLiteral(outfile, level, name_='Description')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('Value=[\n')
level += 1
for Value_ in self.Value:
showIndent(outfile, level)
outfile.write('model_.PropertyConfigurationValue_Type(\n')
Value_.exportLiteral(outfile, level, name_='PropertyConfigurationValue_Type')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('userConfigurable', node)
if value is not None and 'userConfigurable' not in already_processed:
already_processed.append('userConfigurable')
if value in ('true', '1'):
self.userConfigurable = True
elif value in ('false', '0'):
self.userConfigurable = False
else:
raise_parse_error(node, 'Bad boolean attribute')
value = find_attr_value_('value', node)
if value is not None and 'value' not in already_processed:
already_processed.append('value')
self.value = value
value = find_attr_value_('key', node)
if value is not None and 'key' not in already_processed:
already_processed.append('key')
self.key = value
value = find_attr_value_('password', node)
if value is not None and 'password' not in already_processed:
already_processed.append('password')
if value in ('true', '1'):
self.password = True
elif value in ('false', '0'):
self.password = False
else:
raise_parse_error(node, 'Bad boolean attribute')
value = find_attr_value_('type', node)
if value is not None and 'type' not in already_processed:
already_processed.append('type')
self.type_ = value
value = find_attr_value_('qualifiers', node)
if value is not None and 'qualifiers' not in already_processed:
already_processed.append('qualifiers')
self.qualifiers = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Label':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Label(obj_)
elif nodeName_ == 'Description':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Description(obj_)
elif nodeName_ == 'Value':
obj_ = PropertyConfigurationValue_Type.factory()
obj_.build(child_)
self.Value.append(obj_)
# end class PropertyType
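# Usage sketch (comment only): an OVF Property keeps its data in attributes
# (note the trailing underscore on the ``type_`` keyword) and takes optional
# Msg_Type children for Label/Description, matching buildChildren() above.
# Msg_Type is assumed to accept valueOf_ the same way MsgType does.
#
#     prop = PropertyType(key='hostname', type_='string',
#                         userConfigurable=True, value='localhost')
#     prop.set_Label(Msg_Type(valueOf_='Host name'))
#     prop.export(sys.stdout, 1, name_='Property')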
class NetworkType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, name=None, Description=None):
self.name = _cast(None, name)
self.Description = Description
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if NetworkType.subclass:
return NetworkType.subclass(*args_, **kwargs_)
else:
return NetworkType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def get_name(self): return self.name
def set_name(self, name): self.name = name
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='NetworkType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='NetworkType')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='NetworkType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.name is not None and 'name' not in already_processed:
already_processed.append('name')
outfile.write(' name=%s' % (self.gds_format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='NetworkType', fromsubclass_=False):
if self.Description is not None:
self.Description.export(outfile, level, namespace_, name_='Description')
def hasContent_(self):
if (
self.Description is not None
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='NetworkType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.name is not None and 'name' not in already_processed:
already_processed.append('name')
showIndent(outfile, level)
outfile.write('name = "%s",\n' % (self.name,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.Description is not None:
showIndent(outfile, level)
outfile.write('Description=model_.Msg_Type(\n')
self.Description.exportLiteral(outfile, level, name_='Description')
showIndent(outfile, level)
outfile.write('),\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('name', node)
if value is not None and 'name' not in already_processed:
already_processed.append('name')
self.name = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Description':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Description(obj_)
# end class NetworkType
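# Usage sketch (comment only): a NetworkSection entry is a named network plus
# an optional Msg_Type description (assumed to accept valueOf_ like MsgType).
#
#     net = NetworkType(name='VM Network')
#     net.set_Description(Msg_Type(valueOf_='The network the VMs attach to'))
#     net.export(sys.stdout, 1, name_='Network')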
class ItemType(GeneratedsSuper):
"""Unique identifier of the content (within a VirtualSystemCollection)
Startup order. Entities are started up starting with lower-
numbers first, starting from 0. Items with same order identifier
may be started up concurrently or in any order. The order is
reversed for shutdown.Delay in seconds to wait for power on to
completeResumes power-on sequence if guest software reports
okDelay in seconds to wait for power off to completeStart action
to use, valid values are: 'powerOn', 'none' Stop action to use,
valid values are: ''powerOff' , 'guestShutdown', 'none'"""
subclass = None
superclass = None
def __init__(self, stopDelay=0, order=None, startAction='powerOn', startDelay=0, waitingForGuest=False, stopAction='powerOff', id=None):
self.stopDelay = _cast(int, stopDelay)
self.order = _cast(int, order)
self.startAction = _cast(None, startAction)
self.startDelay = _cast(int, startDelay)
self.waitingForGuest = _cast(bool, waitingForGuest)
self.stopAction = _cast(None, stopAction)
self.id = _cast(None, id)
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if ItemType.subclass:
return ItemType.subclass(*args_, **kwargs_)
else:
return ItemType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_stopDelay(self): return self.stopDelay
def set_stopDelay(self, stopDelay): self.stopDelay = stopDelay
def get_order(self): return self.order
def set_order(self, order): self.order = order
def get_startAction(self): return self.startAction
def set_startAction(self, startAction): self.startAction = startAction
def get_startDelay(self): return self.startDelay
def set_startDelay(self, startDelay): self.startDelay = startDelay
def get_waitingForGuest(self): return self.waitingForGuest
def set_waitingForGuest(self, waitingForGuest): self.waitingForGuest = waitingForGuest
def get_stopAction(self): return self.stopAction
def set_stopAction(self, stopAction): self.stopAction = stopAction
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='ItemType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ItemType')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ItemType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.stopDelay is not None and 'stopDelay' not in already_processed:
already_processed.append('stopDelay')
outfile.write(' stopDelay="%s"' % self.gds_format_integer(self.stopDelay, input_name='stopDelay'))
if self.order is not None and 'order' not in already_processed:
already_processed.append('order')
outfile.write(' order="%s"' % self.gds_format_integer(self.order, input_name='order'))
if self.startAction is not None and 'startAction' not in already_processed:
already_processed.append('startAction')
outfile.write(' startAction=%s' % (self.gds_format_string(quote_attrib(self.startAction).encode(ExternalEncoding), input_name='startAction'), ))
if self.startDelay is not None and 'startDelay' not in already_processed:
already_processed.append('startDelay')
outfile.write(' startDelay="%s"' % self.gds_format_integer(self.startDelay, input_name='startDelay'))
if self.waitingForGuest is not None and 'waitingForGuest' not in already_processed:
already_processed.append('waitingForGuest')
outfile.write(' waitingForGuest="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.waitingForGuest)), input_name='waitingForGuest'))
if self.stopAction is not None and 'stopAction' not in already_processed:
already_processed.append('stopAction')
outfile.write(' stopAction=%s' % (self.gds_format_string(quote_attrib(self.stopAction).encode(ExternalEncoding), input_name='stopAction'), ))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='ItemType', fromsubclass_=False):
pass
    def hasContent_(self):
        # ItemType carries only attributes; the generated empty-tuple test
        # was always false, so it never has element content.
        return False
def exportLiteral(self, outfile, level, name_='ItemType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.stopDelay is not None and 'stopDelay' not in already_processed:
already_processed.append('stopDelay')
showIndent(outfile, level)
outfile.write('stopDelay = %d,\n' % (self.stopDelay,))
if self.order is not None and 'order' not in already_processed:
already_processed.append('order')
showIndent(outfile, level)
outfile.write('order = %d,\n' % (self.order,))
if self.startAction is not None and 'startAction' not in already_processed:
already_processed.append('startAction')
showIndent(outfile, level)
outfile.write('startAction = "%s",\n' % (self.startAction,))
if self.startDelay is not None and 'startDelay' not in already_processed:
already_processed.append('startDelay')
showIndent(outfile, level)
outfile.write('startDelay = %d,\n' % (self.startDelay,))
if self.waitingForGuest is not None and 'waitingForGuest' not in already_processed:
already_processed.append('waitingForGuest')
showIndent(outfile, level)
outfile.write('waitingForGuest = %s,\n' % (self.waitingForGuest,))
if self.stopAction is not None and 'stopAction' not in already_processed:
already_processed.append('stopAction')
showIndent(outfile, level)
outfile.write('stopAction = "%s",\n' % (self.stopAction,))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = "%s",\n' % (self.id,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('stopDelay', node)
if value is not None and 'stopDelay' not in already_processed:
already_processed.append('stopDelay')
try:
self.stopDelay = int(value)
            except ValueError as exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
value = find_attr_value_('order', node)
if value is not None and 'order' not in already_processed:
already_processed.append('order')
try:
self.order = int(value)
            except ValueError as exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
value = find_attr_value_('startAction', node)
if value is not None and 'startAction' not in already_processed:
already_processed.append('startAction')
self.startAction = value
value = find_attr_value_('startDelay', node)
if value is not None and 'startDelay' not in already_processed:
already_processed.append('startDelay')
try:
self.startDelay = int(value)
            except ValueError as exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
value = find_attr_value_('waitingForGuest', node)
if value is not None and 'waitingForGuest' not in already_processed:
already_processed.append('waitingForGuest')
if value in ('true', '1'):
self.waitingForGuest = True
elif value in ('false', '0'):
self.waitingForGuest = False
else:
raise_parse_error(node, 'Bad boolean attribute')
value = find_attr_value_('stopAction', node)
if value is not None and 'stopAction' not in already_processed:
already_processed.append('stopAction')
self.stopAction = value
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
self.id = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class ItemType
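# Usage sketch (comment only): StartupSection items are attribute-only; the
# delays are plain integers and the actions use the string constants listed
# in the class docstring.
#
#     item = ItemType(id='vm1', order=0, startAction='powerOn',
#                     stopAction='guestShutdown', stopDelay=120)
#     item.export(sys.stdout, 1, name_='Item')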
class ConfigurationType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, default=False, id=None, Label=None, Description=None):
self.default = _cast(bool, default)
self.id = _cast(None, id)
self.Label = Label
self.Description = Description
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if ConfigurationType.subclass:
return ConfigurationType.subclass(*args_, **kwargs_)
else:
return ConfigurationType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Label(self): return self.Label
def set_Label(self, Label): self.Label = Label
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def get_default(self): return self.default
def set_default(self, default): self.default = default
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='ConfigurationType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ConfigurationType')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ConfigurationType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.default is not None and 'default' not in already_processed:
already_processed.append('default')
outfile.write(' default="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.default)), input_name='default'))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='ConfigurationType', fromsubclass_=False):
if self.Label is not None:
self.Label.export(outfile, level, namespace_, name_='Label', )
if self.Description is not None:
self.Description.export(outfile, level, namespace_, name_='Description', )
def hasContent_(self):
if (
self.Label is not None or
self.Description is not None
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='ConfigurationType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.default is not None and 'default' not in already_processed:
already_processed.append('default')
showIndent(outfile, level)
outfile.write('default = %s,\n' % (self.default,))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = "%s",\n' % (self.id,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.Label is not None:
showIndent(outfile, level)
outfile.write('Label=model_.Msg_Type(\n')
self.Label.exportLiteral(outfile, level, name_='Label')
showIndent(outfile, level)
outfile.write('),\n')
if self.Description is not None:
showIndent(outfile, level)
outfile.write('Description=model_.Msg_Type(\n')
self.Description.exportLiteral(outfile, level, name_='Description')
showIndent(outfile, level)
outfile.write('),\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('default', node)
if value is not None and 'default' not in already_processed:
already_processed.append('default')
if value in ('true', '1'):
self.default = True
elif value in ('false', '0'):
self.default = False
else:
raise_parse_error(node, 'Bad boolean attribute')
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
self.id = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Label':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Label(obj_)
elif nodeName_ == 'Description':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Description(obj_)
# end class ConfigurationType
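# Usage sketch (comment only): a DeploymentOptionSection configuration pairs
# an id with localizable Label/Description children (Msg_Type assumed to
# accept valueOf_); typically exactly one configuration sets default=True.
#
#     conf = ConfigurationType(id='minimal', default=True)
#     conf.set_Label(Msg_Type(valueOf_='Minimal'))
#     conf.set_Description(Msg_Type(valueOf_='Single CPU, 1 GB of RAM'))
#     conf.export(sys.stdout, 1, name_='Configuration')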
class RASD_Type(CIM_ResourceAllocationSettingData_Type):
"""Wrapper for CIM_ResourceAllocationSettingData_TypeDetermines whether
import should fail if entry is not understoodConfiguration from
DeploymentOptionSection this entry is valid forStates that this
entry is a range marker"""
subclass = None
superclass = CIM_ResourceAllocationSettingData_Type
def __init__(self, Address=None, AddressOnParent=None, AllocationUnits=None, AutomaticAllocation=None, AutomaticDeallocation=None, Caption=None, Connection=None, ConsumerVisibility=None, Description=None, ElementName=None, HostResource=None, InstanceID=None, Limit=None, MappingBehavior=None, OtherResourceType=None, Parent=None, PoolID=None, Reservation=None, ResourceSubType=None, ResourceType=None, VirtualQuantity=None, VirtualQuantityUnits=None, Weight=None, anytypeobjs_=None, required=True, bound=None, configuration=None):
super(RASD_Type, self).__init__(Address, AddressOnParent, AllocationUnits, AutomaticAllocation, AutomaticDeallocation, Caption, Connection, ConsumerVisibility, Description, ElementName, HostResource, InstanceID, Limit, MappingBehavior, OtherResourceType, Parent, PoolID, Reservation, ResourceSubType, ResourceType, VirtualQuantity, VirtualQuantityUnits, Weight, anytypeobjs_, )
self.required = _cast(bool, required)
self.bound = _cast(None, bound)
self.configuration = _cast(None, configuration)
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if RASD_Type.subclass:
return RASD_Type.subclass(*args_, **kwargs_)
else:
return RASD_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_required(self): return self.required
def set_required(self, required): self.required = required
def get_bound(self): return self.bound
def set_bound(self, bound): self.bound = bound
def get_configuration(self): return self.configuration
def set_configuration(self, configuration): self.configuration = configuration
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='RASD_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='RASD_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='RASD_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
super(RASD_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='RASD_Type')
if self.required is not None and 'required' not in already_processed:
already_processed.append('required')
outfile.write(' required="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.required)), input_name='required'))
if self.bound is not None and 'bound' not in already_processed:
already_processed.append('bound')
outfile.write(' bound=%s' % (self.gds_format_string(quote_attrib(self.bound).encode(ExternalEncoding), input_name='bound'), ))
if self.configuration is not None and 'configuration' not in already_processed:
already_processed.append('configuration')
outfile.write(' configuration=%s' % (self.gds_format_string(quote_attrib(self.configuration).encode(ExternalEncoding), input_name='configuration'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='RASD_Type', fromsubclass_=False):
super(RASD_Type, self).exportChildren(outfile, level, namespace_, name_, True)
def hasContent_(self):
if (
super(RASD_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='RASD_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.required is not None and 'required' not in already_processed:
already_processed.append('required')
showIndent(outfile, level)
outfile.write('required = %s,\n' % (self.required,))
if self.bound is not None and 'bound' not in already_processed:
already_processed.append('bound')
showIndent(outfile, level)
outfile.write('bound = "%s",\n' % (self.bound,))
if self.configuration is not None and 'configuration' not in already_processed:
already_processed.append('configuration')
showIndent(outfile, level)
outfile.write('configuration = "%s",\n' % (self.configuration,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
super(RASD_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(RASD_Type, self).exportLiteralChildren(outfile, level, name_)
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('required', node)
if value is not None and 'required' not in already_processed:
already_processed.append('required')
if value in ('true', '1'):
self.required = True
elif value in ('false', '0'):
self.required = False
else:
raise_parse_error(node, 'Bad boolean attribute')
value = find_attr_value_('bound', node)
if value is not None and 'bound' not in already_processed:
already_processed.append('bound')
self.bound = value
value = find_attr_value_('configuration', node)
if value is not None and 'configuration' not in already_processed:
already_processed.append('configuration')
self.configuration = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
super(RASD_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(RASD_Type, self).buildChildren(child_, node, nodeName_, True)
pass
# end class RASD_Type
class VSSD_Type(CIM_VirtualSystemSettingData_Type):
"""Wrapper for CIM_VirtualSystemSettingData_Type"""
subclass = None
superclass = CIM_VirtualSystemSettingData_Type
def __init__(self, AutomaticRecoveryAction=None, AutomaticShutdownAction=None, AutomaticStartupAction=None, AutomaticStartupActionDelay=None, AutomaticStartupActionSequenceNumber=None, Caption=None, ConfigurationDataRoot=None, ConfigurationFile=None, ConfigurationID=None, CreationTime=None, Description=None, ElementName=None, InstanceID=None, LogDataRoot=None, Notes=None, RecoveryFile=None, SnapshotDataRoot=None, SuspendDataRoot=None, SwapFileDataRoot=None, VirtualSystemIdentifier=None, VirtualSystemType=None, anytypeobjs_=None):
super(VSSD_Type, self).__init__(AutomaticRecoveryAction, AutomaticShutdownAction, AutomaticStartupAction, AutomaticStartupActionDelay, AutomaticStartupActionSequenceNumber, Caption, ConfigurationDataRoot, ConfigurationFile, ConfigurationID, CreationTime, Description, ElementName, InstanceID, LogDataRoot, Notes, RecoveryFile, SnapshotDataRoot, SuspendDataRoot, SwapFileDataRoot, VirtualSystemIdentifier, VirtualSystemType, anytypeobjs_, )
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if VSSD_Type.subclass:
return VSSD_Type.subclass(*args_, **kwargs_)
else:
return VSSD_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='VSSD_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='VSSD_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VSSD_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
super(VSSD_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='VSSD_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='VSSD_Type', fromsubclass_=False):
super(VSSD_Type, self).exportChildren(outfile, level, namespace_, name_, True)
def hasContent_(self):
if (
super(VSSD_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='VSSD_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
super(VSSD_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(VSSD_Type, self).exportLiteralChildren(outfile, level, name_)
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
super(VSSD_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(VSSD_Type, self).buildChildren(child_, node, nodeName_, True)
pass
# end class VSSD_Type
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    print(USAGE_TEXT)
sys.exit(1)
def get_root_tag(node):
tag = Tag_pattern_.match(node.tag).groups()[-1]
rootClass = globals().get(tag)
return tag, rootClass
def parse(inFileName):
doc = parsexml_(inFileName)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'Envelope'
rootClass = EnvelopeType
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
# sys.stdout.write('<?xml version="1.0" ?>\n')
# rootObj.export(sys.stdout, 0, name_=rootTag,
# namespacedef_='')
return rootObj
def parseString(inString):
from StringIO import StringIO
doc = parsexml_(StringIO(inString))
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'Envelope'
rootClass = EnvelopeType
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(sys.stdout, 0, name_="Envelope",
namespacedef_='')
return rootObj
def parseLiteral(inFileName):
doc = parsexml_(inFileName)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'Envelope'
rootClass = EnvelopeType
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
sys.stdout.write('#from ovfenvelope import *\n\n')
sys.stdout.write('import ovfenvelope as model_\n\n')
    sys.stdout.write('rootObj = model_.%s(\n' % rootTag)
rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
sys.stdout.write(')\n')
return rootObj
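# --- Hedged usage sketch (illustrative, not part of the generated module) ---
# The file name below is a placeholder:
# envelope = parse('appliance.ovf')                # returns the root object, e.g. an EnvelopeType
# envelope.export(sys.stdout, 0, name_='Envelope')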
def main():
args = sys.argv[1:]
if len(args) == 1:
parse(args[0])
else:
usage()
if __name__ == '__main__':
#import pdb; pdb.set_trace()
main()
__all__ = [
"AnnotationSection_Type",
"CIM_ResourceAllocationSettingData_Type",
"CIM_VirtualSystemSettingData_Type",
"Caption",
"ConfigurationType",
"Content_Type",
"DeploymentOptionSection_Type",
"DiskSection_Type",
"EnvelopeType",
"EulaSection_Type",
"File_Type",
"IconType",
"InstallSection_Type",
"ItemType",
"MsgType",
"Msg_Type",
"NetworkSection_Type",
"NetworkType",
"OperatingSystemSection_Type",
"ProductSection_Type",
"PropertyConfigurationValue_Type",
"PropertyType",
"RASD_Type",
"References_Type",
"ResourceAllocationSection_Type",
"Section_Type",
"StartupSection_Type",
"Strings_Type",
"VSSD_Type",
"VirtualDiskDesc_Type",
"VirtualHardwareSection_Type",
"VirtualSystemCollection_Type",
"VirtualSystem_Type",
"cimAnySimpleType",
"cimBase64Binary",
"cimBoolean",
"cimByte",
"cimChar16",
"cimDateTime",
"cimDouble",
"cimFloat",
"cimHexBinary",
"cimInt",
"cimLong",
"cimReference",
"cimShort",
"cimString",
"cimUnsignedByte",
"cimUnsignedInt",
"cimUnsignedLong",
"cimUnsignedShort",
"qualifierBoolean",
"qualifierSArray",
"qualifierSInt64",
"qualifierString",
"qualifierUInt32"
]
|
Dhandapani/gluster-ovirt
|
backend/manager/tools/engine-image-uploader/src/ovf/ovfenvelope.py
|
Python
|
apache-2.0
| 398,478
|
from citrination_client.search.pif.query.chemical.chemical_field_operation import ChemicalFieldOperation
from citrination_client.search.pif.query.core.base_object_query import BaseObjectQuery
from citrination_client.search.pif.query.core.field_operation import FieldOperation
class CompositionQuery(BaseObjectQuery):
"""
Class to query against a PIF Composition object.
"""
def __init__(self, element=None, actual_weight_percent=None, actual_atomic_percent=None,
ideal_weight_percent=None, ideal_atomic_percent=None, logic=None, tags=None,
length=None, offset=None):
"""
Constructor.
:param element: One or more :class:`ChemicalFieldOperation` operations against the element field.
:param actual_weight_percent: One or more :class:`FieldOperation` operations against the actual
weight percent field.
:param actual_atomic_percent: One or more :class:`FieldOperation` operations against the actual
atomic percent field.
:param ideal_weight_percent: One or more :class:`FieldOperation` operations against the ideal
weight percent field.
:param ideal_atomic_percent: One or more :class:`FieldOperation` operations against the ideal
atomic percent field.
:param logic: Logic for this filter. Must be equal to one of "MUST", "MUST_NOT", "SHOULD", or "OPTIONAL".
:param tags: One or more :class:`FieldOperation` operations against the tags field.
:param length: One or more :class:`FieldOperation` operations against the length field.
:param offset: One or more :class:`FieldOperation` operations against the offset field.
"""
super(CompositionQuery, self).__init__(logic=logic, tags=tags, length=length, offset=offset)
self._element = None
self.element = element
self._actual_weight_percent = None
self.actual_weight_percent = actual_weight_percent
self._actual_atomic_percent = None
self.actual_atomic_percent = actual_atomic_percent
self._ideal_weight_percent = None
self.ideal_weight_percent = ideal_weight_percent
self._ideal_atomic_percent = None
self.ideal_atomic_percent = ideal_atomic_percent
@property
def element(self):
return self._element
@element.setter
def element(self, element):
self._element = self._get_object(ChemicalFieldOperation, element)
@element.deleter
def element(self):
self._element = None
@property
def actual_weight_percent(self):
return self._actual_weight_percent
@actual_weight_percent.setter
def actual_weight_percent(self, actual_weight_percent):
self._actual_weight_percent = self._get_object(FieldOperation, actual_weight_percent)
@actual_weight_percent.deleter
def actual_weight_percent(self):
self._actual_weight_percent = None
@property
def actual_atomic_percent(self):
return self._actual_atomic_percent
@actual_atomic_percent.setter
def actual_atomic_percent(self, actual_atomic_percent):
self._actual_atomic_percent = self._get_object(FieldOperation, actual_atomic_percent)
@actual_atomic_percent.deleter
def actual_atomic_percent(self):
self._actual_atomic_percent = None
@property
def ideal_weight_percent(self):
return self._ideal_weight_percent
@ideal_weight_percent.setter
def ideal_weight_percent(self, ideal_weight_percent):
self._ideal_weight_percent = self._get_object(FieldOperation, ideal_weight_percent)
@ideal_weight_percent.deleter
def ideal_weight_percent(self):
self._ideal_weight_percent = None
@property
def ideal_atomic_percent(self):
return self._ideal_atomic_percent
@ideal_atomic_percent.setter
def ideal_atomic_percent(self, ideal_atomic_percent):
self._ideal_atomic_percent = self._get_object(FieldOperation, ideal_atomic_percent)
@ideal_atomic_percent.deleter
def ideal_atomic_percent(self):
self._ideal_atomic_percent = None
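# --- Hedged usage sketch (illustrative, not part of the original module) ---
# The constructor arguments of ChemicalFieldOperation/FieldOperation are not
# shown in this file, so they are elided here rather than guessed:
# query = CompositionQuery(
#     element=ChemicalFieldOperation(...),         # operation against the element field
#     ideal_atomic_percent=FieldOperation(...),    # operation against the ideal atomic percent field
#     logic="MUST")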
|
calfonso/python-citrination-client
|
citrination_client/search/pif/query/chemical/composition_query.py
|
Python
|
apache-2.0
| 4,113
|
#
# Copyright 2014 Telefonica Investigacion y Desarrollo, S.A.U
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for SPASSWORD checker."""
from keystone import tests
from keystone import exception
from keystone_spassword.contrib.spassword import checker
class TestPasswordChecker(tests.BaseTestCase):
def test_checker(self):
new_password = "stronger"
        self.assertRaises(exception.ValidationError,
                          checker.strong_check_password, new_password)
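        # Equivalent form using the standard assertRaises context manager:
        # with self.assertRaises(exception.ValidationError):
        #     checker.strong_check_password(new_password)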
|
telefonicaid/fiware-keystone-spassword
|
keystone_spassword/tests/unit/contrib/spassword/test_checker.py
|
Python
|
apache-2.0
| 1,246
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
import mock
from airflow.providers.google.cloud.operators.dataflow import (
CheckJobRunning, DataflowCreateJavaJobOperator, DataflowCreatePythonJobOperator,
DataflowTemplatedJobStartOperator,
)
from airflow.version import version
TASK_ID = 'test-dataflow-operator'
JOB_NAME = 'test-dataflow-pipeline'
TEMPLATE = 'gs://dataflow-templates/wordcount/template_file'
PARAMETERS = {
'inputFile': 'gs://dataflow-samples/shakespeare/kinglear.txt',
'output': 'gs://test/output/my_output'
}
PY_FILE = 'gs://my-bucket/my-object.py'
PY_INTERPRETER = 'python3'
JAR_FILE = 'gs://my-bucket/example/test.jar'
JOB_CLASS = 'com.test.NotMain'
PY_OPTIONS = ['-m']
DEFAULT_OPTIONS_PYTHON = DEFAULT_OPTIONS_JAVA = {
'project': 'test',
'stagingLocation': 'gs://test/staging',
}
DEFAULT_OPTIONS_TEMPLATE = {
'project': 'test',
'stagingLocation': 'gs://test/staging',
'tempLocation': 'gs://test/temp',
'zone': 'us-central1-f'
}
ADDITIONAL_OPTIONS = {
'output': 'gs://test/output',
'labels': {'foo': 'bar'}
}
TEST_VERSION = 'v{}'.format(version.replace('.', '-').replace('+', '-'))
EXPECTED_ADDITIONAL_OPTIONS = {
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}
}
POLL_SLEEP = 30
GCS_HOOK_STRING = 'airflow.providers.google.cloud.operators.dataflow.{}'
TEST_LOCATION = "custom-location"
class TestDataflowPythonOperator(unittest.TestCase):
def setUp(self):
self.dataflow = DataflowCreatePythonJobOperator(
task_id=TASK_ID,
py_file=PY_FILE,
job_name=JOB_NAME,
py_options=PY_OPTIONS,
dataflow_default_options=DEFAULT_OPTIONS_PYTHON,
options=ADDITIONAL_OPTIONS,
poll_sleep=POLL_SLEEP,
location=TEST_LOCATION
)
def test_init(self):
"""Test DataFlowPythonOperator instance is properly initialized."""
self.assertEqual(self.dataflow.task_id, TASK_ID)
self.assertEqual(self.dataflow.job_name, JOB_NAME)
self.assertEqual(self.dataflow.py_file, PY_FILE)
self.assertEqual(self.dataflow.py_options, PY_OPTIONS)
self.assertEqual(self.dataflow.py_interpreter, PY_INTERPRETER)
self.assertEqual(self.dataflow.poll_sleep, POLL_SLEEP)
self.assertEqual(self.dataflow.dataflow_default_options,
DEFAULT_OPTIONS_PYTHON)
self.assertEqual(self.dataflow.options,
EXPECTED_ADDITIONAL_OPTIONS)
@mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
@mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
def test_exec(self, gcs_hook, dataflow_mock):
"""Test DataflowHook is created and the right args are passed to
start_python_workflow.
"""
start_python_hook = dataflow_mock.return_value.start_python_dataflow
gcs_provide_file = gcs_hook.return_value.provide_file
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
expected_options = {
'project': 'test',
'staging_location': 'gs://test/staging',
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}
}
gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
start_python_hook.assert_called_once_with(
job_name=JOB_NAME,
variables=expected_options,
dataflow=mock.ANY,
py_options=PY_OPTIONS,
py_interpreter=PY_INTERPRETER,
py_requirements=[],
py_system_site_packages=False,
on_new_job_id_callback=mock.ANY,
project_id=None,
location=TEST_LOCATION
)
self.assertTrue(self.dataflow.py_file.startswith('/tmp/dataflow'))
class TestDataflowJavaOperator(unittest.TestCase):
def setUp(self):
self.dataflow = DataflowCreateJavaJobOperator(
task_id=TASK_ID,
jar=JAR_FILE,
job_name=JOB_NAME,
job_class=JOB_CLASS,
dataflow_default_options=DEFAULT_OPTIONS_JAVA,
options=ADDITIONAL_OPTIONS,
poll_sleep=POLL_SLEEP,
location=TEST_LOCATION
)
def test_init(self):
"""Test DataflowTemplateOperator instance is properly initialized."""
self.assertEqual(self.dataflow.task_id, TASK_ID)
self.assertEqual(self.dataflow.job_name, JOB_NAME)
self.assertEqual(self.dataflow.poll_sleep, POLL_SLEEP)
self.assertEqual(self.dataflow.dataflow_default_options,
DEFAULT_OPTIONS_JAVA)
self.assertEqual(self.dataflow.job_class, JOB_CLASS)
self.assertEqual(self.dataflow.jar, JAR_FILE)
self.assertEqual(self.dataflow.options,
EXPECTED_ADDITIONAL_OPTIONS)
self.assertEqual(self.dataflow.check_if_running, CheckJobRunning.WaitForRun)
@mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
@mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
def test_exec(self, gcs_hook, dataflow_mock):
"""Test DataflowHook is created and the right args are passed to
start_java_workflow.
"""
start_java_hook = dataflow_mock.return_value.start_java_dataflow
gcs_provide_file = gcs_hook.return_value.provide_file
self.dataflow.check_if_running = CheckJobRunning.IgnoreJob
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
start_java_hook.assert_called_once_with(
job_name=JOB_NAME,
variables=mock.ANY,
jar=mock.ANY,
job_class=JOB_CLASS,
append_job_name=True,
multiple_jobs=None,
on_new_job_id_callback=mock.ANY,
project_id=None,
location=TEST_LOCATION
)
@mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
@mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
def test_check_job_running_exec(self, gcs_hook, dataflow_mock):
"""Test DataflowHook is created and the right args are passed to
start_java_workflow.
"""
dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
dataflow_running.return_value = True
start_java_hook = dataflow_mock.return_value.start_java_dataflow
gcs_provide_file = gcs_hook.return_value.provide_file
self.dataflow.check_if_running = True
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
gcs_provide_file.assert_not_called()
start_java_hook.assert_not_called()
dataflow_running.assert_called_once_with(
name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION)
@mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
@mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
def test_check_job_not_running_exec(self, gcs_hook, dataflow_mock):
"""Test DataflowHook is created and the right args are passed to
start_java_workflow with option to check if job is running
"""
dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
dataflow_running.return_value = False
start_java_hook = dataflow_mock.return_value.start_java_dataflow
gcs_provide_file = gcs_hook.return_value.provide_file
self.dataflow.check_if_running = True
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
start_java_hook.assert_called_once_with(
job_name=JOB_NAME,
variables=mock.ANY,
jar=mock.ANY,
job_class=JOB_CLASS,
append_job_name=True,
multiple_jobs=None,
on_new_job_id_callback=mock.ANY,
project_id=None,
location=TEST_LOCATION
)
dataflow_running.assert_called_once_with(
name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION)
@mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
@mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
def test_check_multiple_job_exec(self, gcs_hook, dataflow_mock):
"""Test DataflowHook is created and the right args are passed to
start_java_workflow with option to check multiple jobs
"""
dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
dataflow_running.return_value = False
start_java_hook = dataflow_mock.return_value.start_java_dataflow
gcs_provide_file = gcs_hook.return_value.provide_file
self.dataflow.multiple_jobs = True
self.dataflow.check_if_running = True
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
start_java_hook.assert_called_once_with(
job_name=JOB_NAME,
variables=mock.ANY,
jar=mock.ANY,
job_class=JOB_CLASS,
append_job_name=True,
multiple_jobs=True,
on_new_job_id_callback=mock.ANY,
project_id=None,
location=TEST_LOCATION
)
dataflow_running.assert_called_once_with(
name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION
)
class TestDataflowTemplateOperator(unittest.TestCase):
def setUp(self):
self.dataflow = DataflowTemplatedJobStartOperator(
task_id=TASK_ID,
template=TEMPLATE,
job_name=JOB_NAME,
parameters=PARAMETERS,
options=DEFAULT_OPTIONS_TEMPLATE,
dataflow_default_options={"EXTRA_OPTION": "TEST_A"},
poll_sleep=POLL_SLEEP,
location=TEST_LOCATION
)
@mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
def test_exec(self, dataflow_mock):
"""Test DataflowHook is created and the right args are passed to
start_template_workflow.
"""
start_template_hook = dataflow_mock.return_value.start_template_dataflow
self.dataflow.execute(None)
self.assertTrue(dataflow_mock.called)
expected_options = {
'project': 'test',
'stagingLocation': 'gs://test/staging',
'tempLocation': 'gs://test/temp',
'zone': 'us-central1-f',
'EXTRA_OPTION': "TEST_A"
}
start_template_hook.assert_called_once_with(
job_name=JOB_NAME,
variables=expected_options,
parameters=PARAMETERS,
dataflow_template=TEMPLATE,
on_new_job_id_callback=mock.ANY,
project_id=None,
location=TEST_LOCATION
)
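# --- Hedged usage sketch (illustrative, not part of the test module) ---
# In a DAG, the operator under test would be declared roughly as in setUp():
# start_template_job = DataflowTemplatedJobStartOperator(
#     task_id=TASK_ID,
#     template=TEMPLATE,
#     job_name=JOB_NAME,
#     parameters=PARAMETERS,
#     options=DEFAULT_OPTIONS_TEMPLATE,
#     location=TEST_LOCATION,
# )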
|
wooga/airflow
|
tests/providers/google/cloud/operators/test_dataflow.py
|
Python
|
apache-2.0
| 11,898
|
import wasp
def onConflict():
"""
Optional.
    When a conflict happens during solving.
"""
pass
def onDeletion():
"""
Optional.
    When the clause-deletion procedure is invoked.
"""
pass
def onLearningClause(lbd, size, *lits):
"""
Optional.
When a clause is learnt.
:param lbd: the lbd value of the learnt clause
:param size: the size of the learned clause
:param lits: the literals in the learned clause
"""
pass
def onLitInImportantClause(lit):
"""
Optional.
When a literal appears in special clauses, e.g. glue clauses.
:param lit: the literal in the important clause.
"""
pass
def onLitInvolvedInConflict(lit):
"""
Optional.
When a literal is involved in the computation of the learned clause.
:param lit: the literal involved in the conflict
"""
pass
def onLoopFormula(lbd, size, *lits):
"""
Optional.
When a loop formula is learnt for an unfounded set.
:param lbd: the lbd value of the loop formula
:param size: the size of the loop formula
:param lits: the literals in the loop formula
"""
pass
def onNewClause(*clause):
"""
Optional.
    Each clause left after the simplifications is sent to the heuristic
    through this method.
    :param clause: the clause
    """
    pass
def onRestart():
"""
Optional.
When the solver performs a restart.
"""
pass
def onUnfoundedSet(*unfounded_set):
"""
Optional.
When an unfounded set is found.
:param unfounded_set: all atoms in the unfounded set
"""
pass
def initFallback():
"""
Optional.
Init the activities of variables in the fallback heuristic.
:return: List of pairs (v, i), the activity variable v is associated with i.
"""
pass
def factorFallback():
"""
Optional.
Set the factor for the activities of variables in the fallback heuristic (required fallback method).
:return: list of pairs (v, f), the factor f is associated to the variable v.
"""
pass
def signFallback():
"""
Optional.
Set the preferred polarity for variables in the fallback heuristic (required fallback method).
:return: list of literals
"""
pass
def selectLiteral():
"""
Required.
    This method is invoked when a choice is needed. It can return a choice,
    or special values that trigger special actions.
    Special values:
    - wasp.restart() forces the solver to perform a restart
    - wasp.fallback(n) uses the fallback heuristic for n steps (n <= 0: always use the fallback heuristic); requires the fallback methods above to be present in the script
    - wasp.unroll(v) unrolls the truth value of the variable v
:return: wasp.choice(l), where l is a literal
"""
pass
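# --- Hedged example (illustrative only) ---
# A minimal heuristic using only the hooks documented above: remember the
# literals of learnt clauses and branch on the most recent one, delegating
# to the fallback heuristic for one step when nothing has been recorded.
#
# _recent = []
#
# def onLearningClause(lbd, size, *lits):
#     _recent.extend(lits)
#
# def selectLiteral():
#     if _recent:
#         return wasp.choice(_recent.pop())
#     return wasp.fallback(1)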
|
alviano/wasp
|
python_libraries/heuristics/heuristic-instructions.py
|
Python
|
apache-2.0
| 2,804
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compare a txt file of predictions with gold targets from a TSV file."""
from absl import app
from absl import flags
from language.compgen.nqg.tasks import tsv_utils
from tensorflow.io import gfile
FLAGS = flags.FLAGS
flags.DEFINE_string("gold", "", "tsv file containing gold targets.")
flags.DEFINE_string("predictions", "", "txt file with predicted targets.")
def main(unused_argv):
gold_examples = tsv_utils.read_tsv(FLAGS.gold)
preds = []
with gfile.GFile(FLAGS.predictions, "r") as f:
for line in f:
preds.append(line.rstrip())
correct = 0
incorrect = 0
for pred, gold_example in zip(preds, gold_examples):
if pred == gold_example[1]:
correct += 1
else:
incorrect += 1
print("Incorrect for example %s.\nTarget: %s\nPrediction: %s" %
(gold_example[0], gold_example[1], pred))
print("correct: %s" % correct)
print("incorrect: %s" % incorrect)
print("pct: %s" % str(float(correct) / float(correct + incorrect)))
if __name__ == "__main__":
app.run(main)
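# Hedged invocation example (file names are placeholders):
#   python compare_predictions.py --gold=dev.tsv --predictions=predictions.txt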
|
google-research/language
|
language/compgen/nqg/tasks/compare_predictions.py
|
Python
|
apache-2.0
| 1,653
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.kms_v1.types import resources
from google.cloud.kms_v1.types import service
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from .base import KeyManagementServiceTransport, DEFAULT_CLIENT_INFO
class KeyManagementServiceGrpcTransport(KeyManagementServiceTransport):
"""gRPC backend transport for KeyManagementService.
Google Cloud Key Management Service
Manages cryptographic keys and operations using those keys.
Implements a REST model with the following objects:
- [KeyRing][google.cloud.kms.v1.KeyRing]
- [CryptoKey][google.cloud.kms.v1.CryptoKey]
- [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]
- [ImportJob][google.cloud.kms.v1.ImportJob]
If you are using manual gRPC libraries, see `Using gRPC with Cloud
KMS <https://cloud.google.com/kms/docs/grpc>`__.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "cloudkms.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT
                should be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "cloudkms.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
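    # --- Hedged usage sketch (illustrative, not part of the generated file) ---
    # transport = KeyManagementServiceGrpcTransport(
    #     host="cloudkms.googleapis.com",
    #     credentials=None,   # fall back to application default credentials
    # )
    # channel = transport.grpc_channel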
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def list_key_rings(
self,
) -> Callable[[service.ListKeyRingsRequest], service.ListKeyRingsResponse]:
r"""Return a callable for the list key rings method over gRPC.
Lists [KeyRings][google.cloud.kms.v1.KeyRing].
Returns:
Callable[[~.ListKeyRingsRequest],
~.ListKeyRingsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_key_rings" not in self._stubs:
self._stubs["list_key_rings"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/ListKeyRings",
request_serializer=service.ListKeyRingsRequest.serialize,
response_deserializer=service.ListKeyRingsResponse.deserialize,
)
return self._stubs["list_key_rings"]
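    # Hedged illustration: each stub property below returns a plain callable,
    # so a caller could do (the parent path is a placeholder):
    # response = transport.list_key_rings(
    #     service.ListKeyRingsRequest(parent="projects/my-project/locations/global"))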
@property
def list_crypto_keys(
self,
) -> Callable[[service.ListCryptoKeysRequest], service.ListCryptoKeysResponse]:
r"""Return a callable for the list crypto keys method over gRPC.
Lists [CryptoKeys][google.cloud.kms.v1.CryptoKey].
Returns:
Callable[[~.ListCryptoKeysRequest],
~.ListCryptoKeysResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_crypto_keys" not in self._stubs:
self._stubs["list_crypto_keys"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/ListCryptoKeys",
request_serializer=service.ListCryptoKeysRequest.serialize,
response_deserializer=service.ListCryptoKeysResponse.deserialize,
)
return self._stubs["list_crypto_keys"]
@property
def list_crypto_key_versions(
self,
) -> Callable[
[service.ListCryptoKeyVersionsRequest], service.ListCryptoKeyVersionsResponse
]:
r"""Return a callable for the list crypto key versions method over gRPC.
Lists [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion].
Returns:
Callable[[~.ListCryptoKeyVersionsRequest],
~.ListCryptoKeyVersionsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_crypto_key_versions" not in self._stubs:
self._stubs["list_crypto_key_versions"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/ListCryptoKeyVersions",
request_serializer=service.ListCryptoKeyVersionsRequest.serialize,
response_deserializer=service.ListCryptoKeyVersionsResponse.deserialize,
)
return self._stubs["list_crypto_key_versions"]
@property
def list_import_jobs(
self,
) -> Callable[[service.ListImportJobsRequest], service.ListImportJobsResponse]:
r"""Return a callable for the list import jobs method over gRPC.
Lists [ImportJobs][google.cloud.kms.v1.ImportJob].
Returns:
Callable[[~.ListImportJobsRequest],
~.ListImportJobsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_import_jobs" not in self._stubs:
self._stubs["list_import_jobs"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/ListImportJobs",
request_serializer=service.ListImportJobsRequest.serialize,
response_deserializer=service.ListImportJobsResponse.deserialize,
)
return self._stubs["list_import_jobs"]
@property
def get_key_ring(self) -> Callable[[service.GetKeyRingRequest], resources.KeyRing]:
r"""Return a callable for the get key ring method over gRPC.
Returns metadata for a given
[KeyRing][google.cloud.kms.v1.KeyRing].
Returns:
Callable[[~.GetKeyRingRequest],
~.KeyRing]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_key_ring" not in self._stubs:
self._stubs["get_key_ring"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetKeyRing",
request_serializer=service.GetKeyRingRequest.serialize,
response_deserializer=resources.KeyRing.deserialize,
)
return self._stubs["get_key_ring"]
@property
def get_crypto_key(
self,
) -> Callable[[service.GetCryptoKeyRequest], resources.CryptoKey]:
r"""Return a callable for the get crypto key method over gRPC.
Returns metadata for a given
[CryptoKey][google.cloud.kms.v1.CryptoKey], as well as its
[primary][google.cloud.kms.v1.CryptoKey.primary]
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
Returns:
Callable[[~.GetCryptoKeyRequest],
~.CryptoKey]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_crypto_key" not in self._stubs:
self._stubs["get_crypto_key"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetCryptoKey",
request_serializer=service.GetCryptoKeyRequest.serialize,
response_deserializer=resources.CryptoKey.deserialize,
)
return self._stubs["get_crypto_key"]
@property
def get_crypto_key_version(
self,
) -> Callable[[service.GetCryptoKeyVersionRequest], resources.CryptoKeyVersion]:
r"""Return a callable for the get crypto key version method over gRPC.
Returns metadata for a given
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
Returns:
Callable[[~.GetCryptoKeyVersionRequest],
~.CryptoKeyVersion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_crypto_key_version" not in self._stubs:
self._stubs["get_crypto_key_version"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetCryptoKeyVersion",
request_serializer=service.GetCryptoKeyVersionRequest.serialize,
response_deserializer=resources.CryptoKeyVersion.deserialize,
)
return self._stubs["get_crypto_key_version"]
@property
def get_public_key(
self,
) -> Callable[[service.GetPublicKeyRequest], resources.PublicKey]:
r"""Return a callable for the get public key method over gRPC.
Returns the public key for the given
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. The
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] must
be
[ASYMMETRIC_SIGN][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_SIGN]
or
[ASYMMETRIC_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_DECRYPT].
Returns:
Callable[[~.GetPublicKeyRequest],
~.PublicKey]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_public_key" not in self._stubs:
self._stubs["get_public_key"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetPublicKey",
request_serializer=service.GetPublicKeyRequest.serialize,
response_deserializer=resources.PublicKey.deserialize,
)
return self._stubs["get_public_key"]
@property
def get_import_job(
self,
) -> Callable[[service.GetImportJobRequest], resources.ImportJob]:
r"""Return a callable for the get import job method over gRPC.
Returns metadata for a given
[ImportJob][google.cloud.kms.v1.ImportJob].
Returns:
Callable[[~.GetImportJobRequest],
~.ImportJob]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_import_job" not in self._stubs:
self._stubs["get_import_job"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetImportJob",
request_serializer=service.GetImportJobRequest.serialize,
response_deserializer=resources.ImportJob.deserialize,
)
return self._stubs["get_import_job"]
@property
def create_key_ring(
self,
) -> Callable[[service.CreateKeyRingRequest], resources.KeyRing]:
r"""Return a callable for the create key ring method over gRPC.
Create a new [KeyRing][google.cloud.kms.v1.KeyRing] in a given
Project and Location.
Returns:
Callable[[~.CreateKeyRingRequest],
~.KeyRing]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_key_ring" not in self._stubs:
self._stubs["create_key_ring"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/CreateKeyRing",
request_serializer=service.CreateKeyRingRequest.serialize,
response_deserializer=resources.KeyRing.deserialize,
)
return self._stubs["create_key_ring"]
@property
def create_crypto_key(
self,
) -> Callable[[service.CreateCryptoKeyRequest], resources.CryptoKey]:
r"""Return a callable for the create crypto key method over gRPC.
Create a new [CryptoKey][google.cloud.kms.v1.CryptoKey] within a
[KeyRing][google.cloud.kms.v1.KeyRing].
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] and
[CryptoKey.version_template.algorithm][google.cloud.kms.v1.CryptoKeyVersionTemplate.algorithm]
are required.
Returns:
Callable[[~.CreateCryptoKeyRequest],
~.CryptoKey]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_crypto_key" not in self._stubs:
self._stubs["create_crypto_key"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/CreateCryptoKey",
request_serializer=service.CreateCryptoKeyRequest.serialize,
response_deserializer=resources.CryptoKey.deserialize,
)
return self._stubs["create_crypto_key"]
@property
def create_crypto_key_version(
self,
) -> Callable[[service.CreateCryptoKeyVersionRequest], resources.CryptoKeyVersion]:
r"""Return a callable for the create crypto key version method over gRPC.
Create a new
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] in a
[CryptoKey][google.cloud.kms.v1.CryptoKey].
The server will assign the next sequential id. If unset,
[state][google.cloud.kms.v1.CryptoKeyVersion.state] will be set
to
[ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED].
Returns:
Callable[[~.CreateCryptoKeyVersionRequest],
~.CryptoKeyVersion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_crypto_key_version" not in self._stubs:
self._stubs["create_crypto_key_version"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/CreateCryptoKeyVersion",
request_serializer=service.CreateCryptoKeyVersionRequest.serialize,
response_deserializer=resources.CryptoKeyVersion.deserialize,
)
return self._stubs["create_crypto_key_version"]
@property
def import_crypto_key_version(
self,
) -> Callable[[service.ImportCryptoKeyVersionRequest], resources.CryptoKeyVersion]:
r"""Return a callable for the import crypto key version method over gRPC.
Import wrapped key material into a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
All requests must specify a
[CryptoKey][google.cloud.kms.v1.CryptoKey]. If a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] is
additionally specified in the request, key material will be
reimported into that version. Otherwise, a new version will be
created, and will be assigned the next sequential id within the
[CryptoKey][google.cloud.kms.v1.CryptoKey].
Returns:
Callable[[~.ImportCryptoKeyVersionRequest],
~.CryptoKeyVersion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "import_crypto_key_version" not in self._stubs:
self._stubs["import_crypto_key_version"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/ImportCryptoKeyVersion",
request_serializer=service.ImportCryptoKeyVersionRequest.serialize,
response_deserializer=resources.CryptoKeyVersion.deserialize,
)
return self._stubs["import_crypto_key_version"]
@property
def create_import_job(
self,
) -> Callable[[service.CreateImportJobRequest], resources.ImportJob]:
r"""Return a callable for the create import job method over gRPC.
Create a new [ImportJob][google.cloud.kms.v1.ImportJob] within a
[KeyRing][google.cloud.kms.v1.KeyRing].
[ImportJob.import_method][google.cloud.kms.v1.ImportJob.import_method]
is required.
Returns:
Callable[[~.CreateImportJobRequest],
~.ImportJob]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_import_job" not in self._stubs:
self._stubs["create_import_job"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/CreateImportJob",
request_serializer=service.CreateImportJobRequest.serialize,
response_deserializer=resources.ImportJob.deserialize,
)
return self._stubs["create_import_job"]
@property
def update_crypto_key(
self,
) -> Callable[[service.UpdateCryptoKeyRequest], resources.CryptoKey]:
r"""Return a callable for the update crypto key method over gRPC.
Update a [CryptoKey][google.cloud.kms.v1.CryptoKey].
Returns:
Callable[[~.UpdateCryptoKeyRequest],
~.CryptoKey]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_crypto_key" not in self._stubs:
self._stubs["update_crypto_key"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKey",
request_serializer=service.UpdateCryptoKeyRequest.serialize,
response_deserializer=resources.CryptoKey.deserialize,
)
return self._stubs["update_crypto_key"]
@property
def update_crypto_key_version(
self,
) -> Callable[[service.UpdateCryptoKeyVersionRequest], resources.CryptoKeyVersion]:
r"""Return a callable for the update crypto key version method over gRPC.
Update a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]'s
metadata.
[state][google.cloud.kms.v1.CryptoKeyVersion.state] may be
changed between
[ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED]
and
[DISABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DISABLED]
using this method. See
[DestroyCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DestroyCryptoKeyVersion]
and
[RestoreCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.RestoreCryptoKeyVersion]
to move between other states.
Returns:
Callable[[~.UpdateCryptoKeyVersionRequest],
~.CryptoKeyVersion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_crypto_key_version" not in self._stubs:
self._stubs["update_crypto_key_version"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKeyVersion",
request_serializer=service.UpdateCryptoKeyVersionRequest.serialize,
response_deserializer=resources.CryptoKeyVersion.deserialize,
)
return self._stubs["update_crypto_key_version"]
@property
def update_crypto_key_primary_version(
self,
) -> Callable[[service.UpdateCryptoKeyPrimaryVersionRequest], resources.CryptoKey]:
r"""Return a callable for the update crypto key primary
version method over gRPC.
Update the version of a
[CryptoKey][google.cloud.kms.v1.CryptoKey] that will be used in
[Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt].
Returns an error if called on a key whose purpose is not
[ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
Returns:
Callable[[~.UpdateCryptoKeyPrimaryVersionRequest],
~.CryptoKey]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_crypto_key_primary_version" not in self._stubs:
self._stubs[
"update_crypto_key_primary_version"
] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKeyPrimaryVersion",
request_serializer=service.UpdateCryptoKeyPrimaryVersionRequest.serialize,
response_deserializer=resources.CryptoKey.deserialize,
)
return self._stubs["update_crypto_key_primary_version"]
@property
def destroy_crypto_key_version(
self,
) -> Callable[[service.DestroyCryptoKeyVersionRequest], resources.CryptoKeyVersion]:
r"""Return a callable for the destroy crypto key version method over gRPC.
Schedule a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] for
destruction.
Upon calling this method,
[CryptoKeyVersion.state][google.cloud.kms.v1.CryptoKeyVersion.state]
will be set to
[DESTROY_SCHEDULED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROY_SCHEDULED],
and
[destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time]
will be set to the time
[destroy_scheduled_duration][google.cloud.kms.v1.CryptoKey.destroy_scheduled_duration]
in the future. At that time, the
[state][google.cloud.kms.v1.CryptoKeyVersion.state] will
automatically change to
[DESTROYED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROYED],
and the key material will be irrevocably destroyed.
Before the
[destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time]
is reached,
[RestoreCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.RestoreCryptoKeyVersion]
may be called to reverse the process.
Returns:
Callable[[~.DestroyCryptoKeyVersionRequest],
~.CryptoKeyVersion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "destroy_crypto_key_version" not in self._stubs:
self._stubs["destroy_crypto_key_version"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/DestroyCryptoKeyVersion",
request_serializer=service.DestroyCryptoKeyVersionRequest.serialize,
response_deserializer=resources.CryptoKeyVersion.deserialize,
)
return self._stubs["destroy_crypto_key_version"]
@property
def restore_crypto_key_version(
self,
) -> Callable[[service.RestoreCryptoKeyVersionRequest], resources.CryptoKeyVersion]:
r"""Return a callable for the restore crypto key version method over gRPC.
Restore a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] in the
[DESTROY_SCHEDULED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROY_SCHEDULED]
state.
Upon restoration of the CryptoKeyVersion,
[state][google.cloud.kms.v1.CryptoKeyVersion.state] will be set
to
[DISABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DISABLED],
and
[destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time]
will be cleared.
Returns:
Callable[[~.RestoreCryptoKeyVersionRequest],
~.CryptoKeyVersion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "restore_crypto_key_version" not in self._stubs:
self._stubs["restore_crypto_key_version"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/RestoreCryptoKeyVersion",
request_serializer=service.RestoreCryptoKeyVersionRequest.serialize,
response_deserializer=resources.CryptoKeyVersion.deserialize,
)
return self._stubs["restore_crypto_key_version"]
@property
def encrypt(self) -> Callable[[service.EncryptRequest], service.EncryptResponse]:
r"""Return a callable for the encrypt method over gRPC.
Encrypts data, so that it can only be recovered by a call to
[Decrypt][google.cloud.kms.v1.KeyManagementService.Decrypt]. The
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] must
be
[ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
Returns:
Callable[[~.EncryptRequest],
~.EncryptResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "encrypt" not in self._stubs:
self._stubs["encrypt"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/Encrypt",
request_serializer=service.EncryptRequest.serialize,
response_deserializer=service.EncryptResponse.deserialize,
)
return self._stubs["encrypt"]
@property
def decrypt(self) -> Callable[[service.DecryptRequest], service.DecryptResponse]:
r"""Return a callable for the decrypt method over gRPC.
Decrypts data that was protected by
[Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt]. The
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] must
be
[ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
Returns:
Callable[[~.DecryptRequest],
~.DecryptResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "decrypt" not in self._stubs:
self._stubs["decrypt"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/Decrypt",
request_serializer=service.DecryptRequest.serialize,
response_deserializer=service.DecryptResponse.deserialize,
)
return self._stubs["decrypt"]
@property
def asymmetric_sign(
self,
) -> Callable[[service.AsymmetricSignRequest], service.AsymmetricSignResponse]:
r"""Return a callable for the asymmetric sign method over gRPC.
Signs data using a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
ASYMMETRIC_SIGN, producing a signature that can be verified with
the public key retrieved from
[GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey].
Returns:
Callable[[~.AsymmetricSignRequest],
~.AsymmetricSignResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "asymmetric_sign" not in self._stubs:
self._stubs["asymmetric_sign"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/AsymmetricSign",
request_serializer=service.AsymmetricSignRequest.serialize,
response_deserializer=service.AsymmetricSignResponse.deserialize,
)
return self._stubs["asymmetric_sign"]
@property
def asymmetric_decrypt(
self,
) -> Callable[
[service.AsymmetricDecryptRequest], service.AsymmetricDecryptResponse
]:
r"""Return a callable for the asymmetric decrypt method over gRPC.
Decrypts data that was encrypted with a public key retrieved
from
[GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey]
corresponding to a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
ASYMMETRIC_DECRYPT.
Returns:
Callable[[~.AsymmetricDecryptRequest],
~.AsymmetricDecryptResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "asymmetric_decrypt" not in self._stubs:
self._stubs["asymmetric_decrypt"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/AsymmetricDecrypt",
request_serializer=service.AsymmetricDecryptRequest.serialize,
response_deserializer=service.AsymmetricDecryptResponse.deserialize,
)
return self._stubs["asymmetric_decrypt"]
@property
def mac_sign(self) -> Callable[[service.MacSignRequest], service.MacSignResponse]:
r"""Return a callable for the mac sign method over gRPC.
Signs data using a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] MAC,
producing a tag that can be verified by another source with the
same key.
Returns:
Callable[[~.MacSignRequest],
~.MacSignResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "mac_sign" not in self._stubs:
self._stubs["mac_sign"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/MacSign",
request_serializer=service.MacSignRequest.serialize,
response_deserializer=service.MacSignResponse.deserialize,
)
return self._stubs["mac_sign"]
@property
def mac_verify(
self,
) -> Callable[[service.MacVerifyRequest], service.MacVerifyResponse]:
r"""Return a callable for the mac verify method over gRPC.
Verifies MAC tag using a
[CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] MAC,
and returns a response that indicates whether or not the
verification was successful.
Returns:
Callable[[~.MacVerifyRequest],
~.MacVerifyResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "mac_verify" not in self._stubs:
self._stubs["mac_verify"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/MacVerify",
request_serializer=service.MacVerifyRequest.serialize,
response_deserializer=service.MacVerifyResponse.deserialize,
)
return self._stubs["mac_verify"]
@property
def generate_random_bytes(
self,
) -> Callable[
[service.GenerateRandomBytesRequest], service.GenerateRandomBytesResponse
]:
r"""Return a callable for the generate random bytes method over gRPC.
Generate random bytes using the Cloud KMS randomness
source in the provided location.
Returns:
Callable[[~.GenerateRandomBytesRequest],
~.GenerateRandomBytesResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "generate_random_bytes" not in self._stubs:
self._stubs["generate_random_bytes"] = self.grpc_channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GenerateRandomBytes",
request_serializer=service.GenerateRandomBytesRequest.serialize,
response_deserializer=service.GenerateRandomBytesResponse.deserialize,
)
return self._stubs["generate_random_bytes"]
@property
def set_iam_policy(
self,
) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
r"""Return a callable for the set iam policy method over gRPC.
Sets the IAM access control policy on the specified
function. Replaces any existing policy.
Returns:
Callable[[~.SetIamPolicyRequest],
~.Policy]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "set_iam_policy" not in self._stubs:
self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
"/google.iam.v1.IAMPolicy/SetIamPolicy",
request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["set_iam_policy"]
@property
def get_iam_policy(
self,
) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
r"""Return a callable for the get iam policy method over gRPC.
Gets the IAM access control policy for a function.
Returns an empty policy if the function exists and does
not have a policy set.
Returns:
Callable[[~.GetIamPolicyRequest],
~.Policy]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_iam_policy" not in self._stubs:
self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
"/google.iam.v1.IAMPolicy/GetIamPolicy",
request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["get_iam_policy"]
@property
def test_iam_permissions(
self,
) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
iam_policy_pb2.TestIamPermissionsResponse,
]:
r"""Return a callable for the test iam permissions method over gRPC.
Tests the specified permissions against the IAM access control
policy for a function. If the function does not exist, this will
return an empty set of permissions, not a NOT_FOUND error.
Returns:
Callable[[~.TestIamPermissionsRequest],
~.TestIamPermissionsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "test_iam_permissions" not in self._stubs:
self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
"/google.iam.v1.IAMPolicy/TestIamPermissions",
request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs["test_iam_permissions"]
def close(self):
self.grpc_channel.close()
__all__ = ("KeyManagementServiceGrpcTransport",)
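# Usage sketch (illustrative, not part of the generated transport): the
# transport above is normally constructed for you by the client layer.
# Client and helper names follow the public google-cloud-kms API; the
# project/location/key-ring/key IDs below are placeholders.
if __name__ == "__main__":
    from google.cloud import kms
    client = kms.KeyManagementServiceClient()
    key_name = client.crypto_key_path(
        "my-project", "global", "my-key-ring", "my-key"
    )
    response = client.encrypt(request={"name": key_name, "plaintext": b"data"})
    print(response.name, len(response.ciphertext))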
|
googleapis/python-kms
|
google/cloud/kms_v1/services/key_management_service/transports/grpc.py
|
Python
|
apache-2.0
| 50,118
|
#!/usr/bin/python
######################################################################
#
# File: kafka_to_mysql.py
#
# Copyright 2015 TiVo Inc. All Rights Reserved.
#
######################################################################
"""
Usage: kafka_to_mysql.py <kafka_topic> <kafka_broker> <mysql-ip> <mysql-port> <mysql-user> <mysql-password> <mysql_table>
"""
import datetime
import json
import MySQLdb
from kafka import KafkaClient, KafkaConsumer
import sys
def usage():
print __doc__
sys.exit(1)
def main():
# R0915: "too many statements in function (>50)"
# pylint: disable=R0915
if len(sys.argv) != 8:
print "Wrong number of arguments"
usage()
(kafka_topic, kafka_broker, mysql_host, mysql_port, mysql_user, mysql_password, mysql_table) = sys.argv[1:8]
sql_db = MySQLdb.connect(
host = mysql_host,
port = int(mysql_port),
user = mysql_user,
passwd = mysql_password)
query = sql_db.cursor()
client = KafkaClient(kafka_broker)
consumer = KafkaConsumer(kafka_topic, metadata_broker_list = [kafka_broker],
auto_commit_enable = False,
auto_offset_reset='smallest')
last_offsets = {}
partition_ids = client.get_partition_ids_for_topic(kafka_topic)
for partition in partition_ids:
offsets = consumer.get_partition_offsets(kafka_topic, partition, -1, 1)
print offsets
# Don't really understand this format, so put in asserts
# (Pdb) consumer.get_partition_offsets("topicname", 0, -1, 1)
# (15471)
assert len(offsets) == 1
assert offsets[0] > 0
next_offset = offsets[0]
last_offset = next_offset - 1
last_offsets[partition] = last_offset
finished_partitions = set()
print last_offsets
count = 0
# mapping from primary key tuples, to row data
insert_batch = {}
insert_sql = None
for m in consumer:
if m.partition in finished_partitions:
continue
count += 1
payload = m.value
(first_line, rest) = payload.split("\r\n", 1)
(_notused, header_len, _body_len) = first_line.split(" ")
header_len = int(header_len)
body = rest[header_len:]
primary_key_str = m.key
# import pdb; pdb.set_trace()
primary_keys = json.loads(primary_key_str)
primary_tuples = sorted(primary_keys.items())
sorted_primary_key_names = [ k for (k,v) in primary_tuples ]
sorted_primary_key_values = [ int(v) for (k,v) in primary_tuples ]
if len(body) > 0:
# This is a write
data = json.loads(body)
# date fields have to be turned from a number back into a datetime object
date_fields = ['createDate', 'updateDate']
for d in date_fields:
if d not in data:
continue
val = data[d]
if val is None:
continue
if val == -62170156800000:
# this is hacky and a sign that i'm doing something wrong, I think.
val = "0000-00-00 00:00:00"
else:
                    val = val / 1000
                    val = datetime.datetime.utcfromtimestamp(val)
data[d] = val
keys = [ k for (k, v) in sorted(data.items()) ]
values = [ v for (k, v) in sorted(data.items()) ]
keys_wo_primary = [ k for (k, v) in sorted(data.items()) ]
for p in sorted_primary_key_names:
keys_wo_primary.remove(p)
# e.g.
# insert into dbname.tablename (col1, col2) values (%s, %s) on duplicate key update col2 = values(col2)
# assuming that col1 is the primary key
insert_sql = """insert into %s """ % mysql_table
insert_sql += """ (%s) """ % (", ".join(keys))
insert_sql += " values (%s) " % (", ".join(["%s"] * len(values) ))
insert_sql += "on duplicate key update "
insert_sql += ", ".join(["%s = values(%s)" % (k, k) for k in keys_wo_primary ])
insert_batch[tuple(primary_tuples)] = tuple(values)
if len(insert_batch) > 5000:
query.executemany(insert_sql, insert_batch.values())
sql_db.commit()
insert_batch = {}
else:
# This is a delete
if len(insert_batch) > 0 and insert_sql is not None:
# flush all writes before processing any deletes
query.executemany(insert_sql, insert_batch.values())
sql_db.commit()
insert_batch = {}
# get the primary keys, and delete the row
where_clause = ' and '.join([ "%s = %%s" % k for k in sorted_primary_key_names ])
# e.g.
# delete from dbname.tablename where field1 = %s and field2 = %s
delete_sql = """delete from %s where %s""" % (mysql_table, where_clause)
values = tuple(sorted_primary_key_values)
query.execute(delete_sql, values)
sql_db.commit()
# how do I know when to stop?
print "Partition %d Offset %d of %d" % (m.partition, m.offset, last_offsets.get(m.partition))
if m.offset >= last_offsets.get(m.partition):
finished_partitions.add(m.partition)
if len(finished_partitions) == len(last_offsets):
# All partitions are done.
break
if len(insert_batch) > 0:
# flush any remaining writes
query.executemany(insert_sql, insert_batch.values())
sql_db.commit()
insert_batch = {}
print "Imported %d messages into mysql" % count
if __name__ == "__main__":
main()
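# Example invocation (hypothetical broker and credentials), mirroring the
# usage string at the top of this script:
#   python kafka_to_mysql.py my_topic kafka01:9092 127.0.0.1 3306 user pass mydb.mytable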
|
TiVo/wombat
|
correctness/kafka_to_mysql.py
|
Python
|
apache-2.0
| 5,895
|
from __future__ import division
import datetime as dt
missing = object()
try:
import numpy as np
except ImportError:
np = None
def int_to_rgb(number):
"""Given an integer, return the rgb"""
number = int(number)
r = number % 256
g = (number // 256) % 256
b = (number // (256 * 256)) % 256
return r, g, b
def rgb_to_int(rgb):
"""Given an rgb, return an int"""
return rgb[0] + (rgb[1] * 256) + (rgb[2] * 256 * 256)
def get_duplicates(seq):
seen = set()
duplicates = set(x for x in seq if x in seen or seen.add(x))
return duplicates
def np_datetime_to_datetime(np_datetime):
ts = (np_datetime - np.datetime64('1970-01-01T00:00:00Z')) / np.timedelta64(1, 's')
dt_datetime = dt.datetime.utcfromtimestamp(ts)
return dt_datetime
class VBAWriter(object):
class Block(object):
def __init__(self, writer, start):
self.writer = writer
self.start = start
def __enter__(self):
self.writer.writeln(self.start)
self.writer._indent += 1
def __exit__(self, exc_type, exc_val, exc_tb):
self.writer._indent -= 1
#self.writer.writeln(self.end)
def __init__(self, f):
self.f = f
self._indent = 0
self._freshline = True
def block(self, template, **kwargs):
return VBAWriter.Block(self, template.format(**kwargs))
def start_block(self, template, **kwargs):
self.writeln(template, **kwargs)
self._indent += 1
def end_block(self, template, **kwargs):
self.writeln(template, **kwargs)
self._indent -= 1
def write(self, template, **kwargs):
if self._freshline:
self.f.write('\t' * self._indent)
self._freshline = False
if kwargs:
template = template.format(**kwargs)
self.f.write(template)
        if template.endswith('\n'):
self._freshline = True
def write_label(self, label):
self._indent -= 1
self.write(label + ':\n')
self._indent += 1
def writeln(self, template, **kwargs):
self.write(template + '\n', **kwargs)
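# Minimal usage sketch for VBAWriter (illustrative, not part of the module;
# assumes Python 3 for the text-mode StringIO): emit an indented VBA Sub.
if __name__ == '__main__':
    import io
    buf = io.StringIO()
    writer = VBAWriter(buf)
    with writer.block('Sub HelloWorld()'):
        writer.writeln('MsgBox "{msg}"', msg='Hello')
    writer.writeln('End Sub')
    print(buf.getvalue())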
|
Juanlu001/xlwings
|
xlwings/utils.py
|
Python
|
apache-2.0
| 2,171
|
#!/usr/bin/python
# Copyright 2015 Comcast Cable Communications Management, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# End Copyright
# Little external FS (fact service) example
# Wrap Yahoo stock quotes as an FS.
# Pattern must specify a single ticker symbol "symbol".
# Pattern must specify at least one additional property from the set
legalProperties = {"bid", "ask", "change", "percentChange", "lastTradeSize"}
# curl 'http://download.finance.yahoo.com/d/quotes.csv?s=CMCSA&f=abc1p2k3&e=.csv'
# http://www.canbike.ca/information-technology/yahoo-finance-url-download-to-a-csv-file.html
# A more principled approach would allow the pattern to specify only a
# single additional property, but that decision is a separate
# discussion.
# Usage:
#
# curl -d '{"symbol":"CMCSA","bid":"?bid","ask":"?ask"}' 'http://localhost:6666/facts/search'
#
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import cgi # Better way now?
import json
import urllib2
import urllib
import re
PORT = 6666
def protest (response, message):
response.send_response(200)
response.send_header('Content-type','text/plain')
response.end_headers()
response.wfile.write(message) # Should probably be JSON
def getQuote (symbol):
uri = "http://download.finance.yahoo.com/d/quotes.csv?s=" + symbol + "&f=abc1p2k3&e=.csv"
print "uri ", uri
line = urllib2.urlopen(uri).read().strip()
print "got ", line, "\n"
line = re.sub(r'[%"\n]+', "", line)
print "clean ", line, "\n"
data = line.split(",")
ns = map(float, data)
q = {}
q["bid"] = ns[0]
q["ask"] = ns[1]
q["change"] = ns[2]
q["percentChange"] = ns[3]
q["lastTradeSize"] = ns[4]
return q
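# For illustration, a CSV line "36.10,36.12,+0.25,0.70,1200" parses to
# {'bid': 36.1, 'ask': 36.12, 'change': 0.25, 'percentChange': 0.7,
#  'lastTradeSize': 1200.0} via the float conversion above.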
class handler(BaseHTTPRequestHandler):
def do_GET(self):
protest(self, "You should POST with json.\n")
return
def do_POST(self):
if not self.path == '/facts/search':
protest(self, "Only can do /facts/search.\n")
return
try:
content_length = int(self.headers['Content-Length'])
js = self.rfile.read(content_length)
m = json.loads(js)
if 'symbol' not in m:
protest(self, "Need symbol.\n")
return
symbol = m["symbol"]
del m["symbol"]
for p in m:
if p not in legalProperties:
protest(self, "Illegal property " + p + ".\n")
return
v = m[p]
if not v.startswith("?"):
protest(self, "Value " + v + " must be a variable.\n")
return
if len(v) < 2:
protest(self, "Need an named variable for " + v + ".\n")
return
q = getQuote(symbol)
print q, "\n"
bindings = {}
satisfied = True
            for p in m:
                if p in q:
                    print p, ": ", q[p], "\n"
                    bindings[m[p]] = q[p]
                else:
                    satisfied = False
                    break
if satisfied:
js = json.dumps(bindings)
response = '{"Found":[{"Bindingss":[%s]}]}' % (js)
else:
response = '{"Found":[{"Bindingss":[]}]}'
self.send_response(200)
self.send_header('Content-type','application/json')
self.end_headers()
print 'response ', response
self.wfile.write(response)
except Exception as broke:
print broke, "\n"
protest(self, str(broke))
try:
server = HTTPServer(('', PORT), handler)
    print 'Started stock FS on port ', PORT
server.serve_forever()
except KeyboardInterrupt:
    print '^C received, shutting down the stock FS on ', PORT
server.socket.close()
|
Comcast/rulio
|
examples/stockfs.py
|
Python
|
apache-2.0
| 4,418
|
import os
from torch.utils.ffi import create_extension
sources = ["src/lib_cffi.cpp"]
headers = ["src/lib_cffi.h"]
extra_objects = ["src/bn.o"]
with_cuda = True
this_file = os.path.dirname(os.path.realpath(__file__))
extra_objects = [os.path.join(this_file, fname) for fname in extra_objects]
ffi = create_extension(
"_ext",
headers=headers,
sources=sources,
relative_to=__file__,
with_cuda=with_cuda,
extra_objects=extra_objects,
extra_compile_args=["-std=c++11"],
)
if __name__ == "__main__":
ffi.build()
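# Usage sketch: src/bn.o must be compiled beforehand (it is linked in via
# extra_objects); running this script then builds the `_ext` CFFI module:
#   python build.py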
|
Diyago/Machine-Learning-scripts
|
DEEP LEARNING/segmentation/Kaggle TGS Salt Identification Challenge/v2/modules/build.py
|
Python
|
apache-2.0
| 544
|
# Copyright 2015 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import logging
import yaml
import netaddr
import os
import re
import collections
import itertools
import shutil
import gevent
import gevent.queue
import gevent.event
import pan.xapi
from . import base
from . import actorbase
from . import table
from .utils import utc_millisec
LOG = logging.getLogger(__name__)
SUBRE = re.compile("[^A-Za-z0-9_]")
class DevicePusher(gevent.Greenlet):
def __init__(self, device, prefix, watermark, attributes, persistent):
super(DevicePusher, self).__init__()
self.device = device
self.xapi = pan.xapi.PanXapi(
tag=self.device.get('tag', None),
api_username=self.device.get('api_username', None),
api_password=self.device.get('api_password', None),
api_key=self.device.get('api_key', None),
port=self.device.get('port', None),
hostname=self.device.get('hostname', None),
serial=self.device.get('serial', None)
)
self.prefix = prefix
self.attributes = attributes
self.watermark = watermark
self.persistent = persistent
self.q = gevent.queue.Queue()
def put(self, op, address, value):
LOG.debug('adding %s:%s to device queue', op, address)
self.q.put([op, address, value])
def _get_registered_ip_tags(self, ip):
self.xapi.op(
cmd='<show><object><registered-ip><ip>%s</ip></registered-ip></object></show>' % ip,
vsys=self.device.get('vsys', None),
cmd_xml=False
)
entries = self.xapi.element_root.findall('./result/entry')
if entries is None or len(entries) == 0:
LOG.warning('%s: ip %s has no tags', self.device.get('hostname', None), ip)
return None
tags = [member.text for member in entries[0].findall('./tag/member')
if member.text and member.text.startswith(self.prefix)]
return tags
def _get_all_registered_ips(self):
cmd = (
'<show><object><registered-ip><tag><entry name="%s%s"/></tag></registered-ip></object></show>' %
(self.prefix, self.watermark)
)
self.xapi.op(
cmd=cmd,
vsys=self.device.get('vsys', None),
cmd_xml=False
)
entries = self.xapi.element_root.findall('./result/entry')
if not entries:
return
for entry in entries:
ip = entry.get("ip")
yield ip, self._get_registered_ip_tags(ip)
def _dag_message(self, type_, addresses):
message = [
"<uid-message>",
"<version>1.0</version>",
"<type>update</type>",
"<payload>"
]
persistent = ''
if type_ == 'register':
persistent = ' persistent="%d"' % (1 if self.persistent else 0)
message.append('<%s>' % type_)
if addresses is not None and len(addresses) != 0:
akeys = sorted(addresses.keys())
for a in akeys:
message.append(
'<entry ip="%s"%s>' % (a, persistent)
)
tags = sorted(addresses[a])
if tags is not None:
message.append('<tag>')
for t in tags:
message.append('<member>%s</member>' % t)
message.append('</tag>')
message.append('</entry>')
message.append('</%s>' % type_)
message.append('</payload></uid-message>')
return ''.join(message)
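    # For illustration, _dag_message('register', {'192.0.2.1': ['mmld_pushed']})
    # with persistent registrations enabled produces (as one string):
    #   <uid-message><version>1.0</version><type>update</type><payload>
    #   <register><entry ip="192.0.2.1" persistent="1"><tag>
    #   <member>mmld_pushed</member></tag></entry></register></payload></uid-message>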
def _user_id(self, cmd=None):
try:
self.xapi.user_id(cmd=cmd,
vsys=self.device.get('vsys', None))
except gevent.GreenletExit:
raise
except pan.xapi.PanXapiError as e:
LOG.debug('%s', e)
if 'already exists, ignore' in str(e):
pass
elif 'does not exist, ignore unreg' in str(e):
pass
elif 'Failed to register' in str(e):
pass
else:
LOG.exception('XAPI exception in pusher for device %s: %s',
self.device.get('hostname', None), str(e))
raise
def _tags_from_value(self, value):
result = []
def _tag(t, v):
if type(v) == unicode:
v = v.encode('ascii', 'replace')
else:
v = str(v)
v = SUBRE.sub('_', v)
tag = '%s%s_%s' % (self.prefix, t, v)
return tag
for t in self.attributes:
if t in value:
if t == 'confidence':
confidence = value[t]
if confidence < 50:
tag = '%s%s_low' % (self.prefix, t)
elif confidence < 75:
tag = '%s%s_medium' % (self.prefix, t)
else:
tag = '%s%s_high' % (self.prefix, t)
result.append(tag)
else:
LOG.debug('%s %s %s', t, value[t], type(value[t]))
if isinstance(value[t], list):
for v in value[t]:
LOG.debug('%s', v)
result.append(_tag(t, v))
else:
result.append(_tag(t, value[t]))
else:
# XXX noop for this case?
result.append('%s%s_unknown' % (self.prefix, t))
LOG.debug('%s', result)
return set(result) # XXX eliminate duplicates
def _push(self, op, address, value):
tags = []
tags.append('%s%s' % (self.prefix, self.watermark))
tags += self._tags_from_value(value)
if len(tags) == 0:
tags = None
msg = self._dag_message(op, {address: tags})
self._user_id(cmd=msg)
def _init_resync(self):
ctags = collections.defaultdict(set)
while True:
op, address, value = self.q.get()
if op == 'EOI':
break
if op != 'init':
raise RuntimeError(
'DevicePusher %s - wrong op %s received in init phase' %
(self.device.get('hostname', None), op)
)
ctags[address].add('%s%s' % (self.prefix, self.watermark))
for t in self._tags_from_value(value):
ctags[address].add(t)
LOG.debug('%s', ctags)
register = collections.defaultdict(list)
unregister = collections.defaultdict(list)
for a, atags in self._get_all_registered_ips():
regtags = set()
if atags is not None:
for t in atags:
regtags.add(t)
added = ctags[a] - regtags
removed = regtags - ctags[a]
for t in added:
register[a].append(t)
for t in removed:
unregister[a].append(t)
ctags.pop(a)
# ips not in firewall
for a, atags in ctags.iteritems():
register[a] = atags
LOG.debug('register %s', register)
LOG.debug('unregister %s', unregister)
# XXX use constant for chunk size
if len(register) != 0:
addrs = iter(register)
for i in xrange(0, len(register), 1000):
rmsg = self._dag_message(
'register',
{k: register[k] for k in itertools.islice(addrs, 1000)}
)
self._user_id(cmd=rmsg)
if len(unregister) != 0:
addrs = iter(unregister)
for i in xrange(0, len(unregister), 1000):
urmsg = self._dag_message(
'unregister',
{k: unregister[k] for k in itertools.islice(addrs, 1000)}
)
self._user_id(cmd=urmsg)
def _run(self):
self._init_resync()
while True:
try:
op, address, value = self.q.peek()
self._push(op, address, value)
self.q.get() # discard processed message
except gevent.GreenletExit:
break
except pan.xapi.PanXapiError as e:
LOG.exception('XAPI exception in pusher for device %s: %s',
self.device.get('hostname', None), str(e))
raise
class DagPusher(actorbase.ActorBaseFT):
def __init__(self, name, chassis, config):
self.devices = []
self.device_pushers = []
self.device_list_glet = None
self.device_list_mtime = None
self.ageout_glet = None
self.last_ageout_run = None
self.hup_event = gevent.event.Event()
super(DagPusher, self).__init__(name, chassis, config)
def configure(self):
super(DagPusher, self).configure()
self.device_list_path = self.config.get('device_list', None)
if self.device_list_path is None:
self.device_list_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'%s_device_list.yml' % self.name
)
self.age_out = self.config.get('age_out', 3600)
self.age_out_interval = self.config.get('age_out_interval', None)
self.tag_prefix = self.config.get('tag_prefix', 'mmld_')
self.tag_watermark = self.config.get('tag_watermark', 'pushed')
self.tag_attributes = self.config.get(
'tag_attributes',
['confidence', 'direction']
)
self.persistent_registered_ips = self.config.get(
'persistent_registered_ips',
True
)
def _initialize_table(self, truncate=False):
self.table = table.Table(self.name, truncate=truncate)
self.table.create_index('_age_out')
def initialize(self):
self._initialize_table()
def rebuild(self):
self.rebuild_flag = True
self._initialize_table(truncate=True)
def reset(self):
self._initialize_table(truncate=True)
def _validate_ip(self, indicator, value):
type_ = value.get('type', None)
if type_ not in ['IPv4', 'IPv6']:
LOG.error('%s - invalid indicator type, ignored: %s',
self.name, type_)
self.statistics['ignored'] += 1
return
if '-' in indicator:
i1, i2 = indicator.split('-', 1)
if i1 != i2:
LOG.error('%s - indicator range must be equal, ignored: %s',
self.name, indicator)
self.statistics['ignored'] += 1
return
indicator = i1
try:
address = netaddr.IPNetwork(indicator)
except netaddr.core.AddrFormatError as e:
LOG.error('%s - invalid IP address received, ignored: %s',
self.name, e)
self.statistics['ignored'] += 1
return
if address.size != 1:
LOG.error('%s - IP network received, ignored: %s',
self.name, address)
self.statistics['ignored'] += 1
return
if type_ == 'IPv4' and address.version != 4 or \
type_ == 'IPv6' and address.version != 6:
LOG.error('%s - IP version mismatch, ignored',
self.name)
self.statistics['ignored'] += 1
return
return address
@base._counting('update.processed')
def filtered_update(self, source=None, indicator=None, value=None):
address = self._validate_ip(indicator, value)
if address is None:
return
current_value = self.table.get(str(address))
now = utc_millisec()
age_out = now+self.age_out*1000
value['_age_out'] = age_out
self.statistics['added'] += 1
self.table.put(str(address), value)
LOG.debug('%s - #indicators: %d', self.name, self.length())
value.pop('_age_out')
uflag = False
if current_value is not None:
for t in self.tag_attributes:
cv = current_value.get(t, None)
nv = value.get(t, None)
if isinstance(cv, list) or isinstance(nv, list):
uflag |= set(cv) != set(nv)
else:
uflag |= cv != nv
LOG.debug('uflag %s current %s new %s', uflag, current_value, value)
for p in self.device_pushers:
if uflag:
p.put('unregister', str(address), current_value)
p.put('register', str(address), value)
@base._counting('withdraw.processed')
def filtered_withdraw(self, source=None, indicator=None, value=None):
address = self._validate_ip(indicator, value)
if address is None:
return
current_value = self.table.get(str(address))
if current_value is None:
LOG.warning('%s - unknown indicator received, ignored: %s',
self.name, address)
self.statistics['ignored'] += 1
return
current_value.pop('_age_out', None)
self.statistics['removed'] += 1
self.table.delete(str(address))
LOG.debug('%s - #indicators: %d', self.name, self.length())
for p in self.device_pushers:
p.put('unregister', str(address), current_value)
def _age_out_run(self):
while True:
try:
now = utc_millisec()
LOG.debug('now: %s', now)
for i, v in self.table.query(index='_age_out',
to_key=now-1,
include_value=True):
LOG.debug('%s - %s %s aged out', self.name, i, v)
for dp in self.device_pushers:
dp.put(
op='unregister',
address=i,
value=v
)
self.statistics['aged_out'] += 1
self.table.delete(i)
self.last_ageout_run = now
LOG.debug('%s - #indicators: %d', self.name, self.length())
except gevent.GreenletExit:
break
except Exception:
LOG.exception('Exception in _age_out_loop')
try:
gevent.sleep(self.age_out_interval)
except gevent.GreenletExit:
break
def _spawn_device_pusher(self, device):
dp = DevicePusher(
device,
self.tag_prefix,
self.tag_watermark,
self.tag_attributes,
self.persistent_registered_ips
)
dp.link_exception(self._device_pusher_died)
for i, v in self.table.query(include_value=True):
            LOG.debug('%s - adding %s to init', self.name, i)
dp.put('init', i, v)
dp.put('EOI', None, None)
return dp
def _device_pusher_died(self, g):
try:
g.get()
except gevent.GreenletExit:
pass
except Exception:
LOG.exception('%s - exception in greenlet for %s, '
'respawning in 60 seconds',
self.name, g.device['hostname'])
for idx in range(len(self.device_pushers)):
if self.device_pushers[idx].device == g.device:
break
else:
LOG.info('%s - device pusher for %s removed,' +
' respawning aborted',
self.name, g.device['hostname'])
g = None
return
dp = self._spawn_device_pusher(g.device)
self.device_pushers[idx] = dp
dp.start_later(60)
def _load_device_list(self):
with open(self.device_list_path, 'r') as dlf:
dlist = yaml.safe_load(dlf)
added = [d for i, d in enumerate(dlist) if d not in self.devices]
removed = [i for i, d in enumerate(self.devices) if d not in dlist]
dpushers = []
for d in dlist:
if d in added:
dp = self._spawn_device_pusher(d)
dpushers.append(dp)
else:
idx = self.devices.index(d)
dpushers.append(self.device_pushers[idx])
for idx in removed:
self.device_pushers[idx].kill()
self.device_pushers = dpushers
self.devices = dlist
for g in self.device_pushers:
if g.value is None and not g.started:
g.start()
def _huppable_wait(self, wait_time):
hup_called = self.hup_event.wait(timeout=wait_time)
if hup_called:
LOG.debug('%s - clearing poll event', self.name)
self.hup_event.clear()
def _device_list_monitor(self):
if self.device_list_path is None:
LOG.warning('%s - no device_list path configured', self.name)
return
while True:
try:
mtime = os.stat(self.device_list_path).st_mtime
except OSError:
LOG.debug('%s - error checking mtime of %s',
self.name, self.device_list_path)
self._huppable_wait(5)
continue
if mtime != self.device_list_mtime:
self.device_list_mtime = mtime
try:
self._load_device_list()
LOG.info('%s - device list loaded', self.name)
except Exception:
LOG.exception('%s - exception loading device list',
self.name)
self._huppable_wait(5)
def mgmtbus_status(self):
result = super(DagPusher, self).mgmtbus_status()
result['devices'] = len(self.devices)
return result
def length(self, source=None):
return self.table.num_indicators
def start(self):
super(DagPusher, self).start()
if self.device_list_glet is not None:
return
self.device_list_glet = gevent.spawn_later(
2,
self._device_list_monitor
)
if self.age_out_interval is not None:
self.ageout_glet = gevent.spawn(self._age_out_run)
def stop(self):
super(DagPusher, self).stop()
if self.device_list_glet is None:
return
for g in self.device_pushers:
g.kill()
self.device_list_glet.kill()
if self.ageout_glet is not None:
self.ageout_glet.kill()
self.table.close()
def hup(self, source=None):
LOG.info('%s - hup received, reload device list', self.name)
self.hup_event.set()
@staticmethod
def gc(name, config=None):
actorbase.ActorBaseFT.gc(name, config=config)
shutil.rmtree(name, ignore_errors=True)
device_list_path = None
if config is not None:
device_list_path = config.get('device_list', None)
if device_list_path is None:
device_list_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'{}_device_list.yml'.format(name)
)
try:
os.remove(device_list_path)
except OSError:
pass
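# Illustrative device_list YAML consumed by DagPusher._load_device_list;
# keys mirror what DevicePusher reads, values are placeholders:
#   - hostname: fw1.example.com
#     api_username: admin
#     api_password: secret
#     vsys: vsys1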
|
PaloAltoNetworks/minemeld-core
|
minemeld/ft/dag.py
|
Python
|
apache-2.0
| 20,185
|
from django.contrib.staticfiles import storage
# Configure the permissions used by ./manage.py collectstatic
# See https://docs.djangoproject.com/en/1.10/ref/contrib/staticfiles/
class TTStaticFilesStorage(storage.StaticFilesStorage):
def __init__(self, *args, **kwargs):
kwargs['file_permissions_mode'] = 0o644
kwargs['directory_permissions_mode'] = 0o755
super(TTStaticFilesStorage, self).__init__(*args, **kwargs)
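# Usage sketch: reference this class from settings.py so collectstatic
# applies the modes above (dotted path assumed from this repo's layout):
#   STATICFILES_STORAGE = 'thresher_backend.storage.TTStaticFilesStorage'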
|
Goodly/TextThresher
|
thresher_backend/storage.py
|
Python
|
apache-2.0
| 446
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-12 16:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tocayoapp', '0007_gender_description'),
]
operations = [
migrations.AlterField(
model_name='gender',
name='description',
field=models.CharField(max_length=15),
),
]
|
philpot/tocayo
|
tocayoproj/tocayoapp/migrations/0008_auto_20151212_1607.py
|
Python
|
apache-2.0
| 455
|
import logging
import os
import os.path
import shutil
import subprocess
import tempfile
import time
from six.moves import urllib
import uuid
from six.moves.urllib.parse import urlparse # pylint: disable=E0611,F0401
from test.service import ExternalService, SpawnedService
from test.testutil import get_open_port
log = logging.getLogger(__name__)
class Fixture(object):
kafka_version = os.environ.get('KAFKA_VERSION', '0.8.0')
scala_version = os.environ.get("SCALA_VERSION", '2.8.0')
project_root = os.environ.get('PROJECT_ROOT', os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
kafka_root = os.environ.get("KAFKA_ROOT", os.path.join(project_root, 'servers', kafka_version, "kafka-bin"))
ivy_root = os.environ.get('IVY_ROOT', os.path.expanduser("~/.ivy2/cache"))
@classmethod
def download_official_distribution(cls,
kafka_version=None,
scala_version=None,
output_dir=None):
if not kafka_version:
kafka_version = cls.kafka_version
if not scala_version:
scala_version = cls.scala_version
if not output_dir:
output_dir = os.path.join(cls.project_root, 'servers', 'dist')
distfile = 'kafka_%s-%s' % (scala_version, kafka_version,)
url_base = 'https://archive.apache.org/dist/kafka/%s/' % (kafka_version,)
output_file = os.path.join(output_dir, distfile + '.tgz')
if os.path.isfile(output_file):
log.info("Found file already on disk: %s", output_file)
return output_file
# New tarballs are .tgz, older ones are sometimes .tar.gz
try:
url = url_base + distfile + '.tgz'
log.info("Attempting to download %s", url)
response = urllib.request.urlopen(url)
except urllib.error.HTTPError:
log.exception("HTTP Error")
url = url_base + distfile + '.tar.gz'
log.info("Attempting to download %s", url)
response = urllib.request.urlopen(url)
log.info("Saving distribution file to %s", output_file)
        with open(output_file, 'wb') as output_file_fd:  # binary mode: tarball payload
output_file_fd.write(response.read())
return output_file
@classmethod
def test_resource(cls, filename):
return os.path.join(cls.project_root, "servers", cls.kafka_version, "resources", filename)
@classmethod
def kafka_run_class_args(cls, *args):
result = [os.path.join(cls.kafka_root, 'bin', 'kafka-run-class.sh')]
result.extend(args)
return result
@classmethod
def kafka_run_class_env(cls):
env = os.environ.copy()
env['KAFKA_LOG4J_OPTS'] = "-Dlog4j.configuration=file:%s" % cls.test_resource("log4j.properties")
return env
@classmethod
def render_template(cls, source_file, target_file, binding):
with open(source_file, "r") as handle:
template = handle.read()
with open(target_file, "w") as handle:
handle.write(template.format(**binding))
class ZookeeperFixture(Fixture):
@classmethod
def instance(cls):
if "ZOOKEEPER_URI" in os.environ:
parse = urlparse(os.environ["ZOOKEEPER_URI"])
(host, port) = (parse.hostname, parse.port)
fixture = ExternalService(host, port)
else:
(host, port) = ("127.0.0.1", get_open_port())
fixture = cls(host, port)
fixture.open()
return fixture
def __init__(self, host, port):
self.host = host
self.port = port
self.tmp_dir = None
self.child = None
def out(self, message):
log.info("*** Zookeeper [%s:%d]: %s", self.host, self.port, message)
def open(self):
self.tmp_dir = tempfile.mkdtemp()
self.out("Running local instance...")
log.info(" host = %s", self.host)
log.info(" port = %s", self.port)
log.info(" tmp_dir = %s", self.tmp_dir)
# Generate configs
template = self.test_resource("zookeeper.properties")
properties = os.path.join(self.tmp_dir, "zookeeper.properties")
self.render_template(template, properties, vars(self))
# Configure Zookeeper child process
args = self.kafka_run_class_args("org.apache.zookeeper.server.quorum.QuorumPeerMain", properties)
env = self.kafka_run_class_env()
# Party!
self.out("Starting...")
timeout = 5
max_timeout = 30
backoff = 1
while True:
self.child = SpawnedService(args, env)
self.child.start()
timeout = min(timeout, max_timeout)
if self.child.wait_for(r"binding to port", timeout=timeout):
break
self.child.stop()
timeout *= 2
time.sleep(backoff)
self.out("Done!")
def close(self):
self.out("Stopping...")
self.child.stop()
self.child = None
self.out("Done!")
shutil.rmtree(self.tmp_dir)
class KafkaFixture(Fixture):
@classmethod
def instance(cls, broker_id, zk_host, zk_port, zk_chroot=None, replicas=1, partitions=2):
if zk_chroot is None:
zk_chroot = "kafka-python_" + str(uuid.uuid4()).replace("-", "_")
if "KAFKA_URI" in os.environ:
parse = urlparse(os.environ["KAFKA_URI"])
(host, port) = (parse.hostname, parse.port)
fixture = ExternalService(host, port)
else:
(host, port) = ("127.0.0.1", get_open_port())
fixture = KafkaFixture(host, port, broker_id, zk_host, zk_port, zk_chroot, replicas, partitions)
fixture.open()
return fixture
def __init__(self, host, port, broker_id, zk_host, zk_port, zk_chroot, replicas=1, partitions=2):
self.host = host
self.port = port
self.broker_id = broker_id
self.zk_host = zk_host
self.zk_port = zk_port
self.zk_chroot = zk_chroot
self.replicas = replicas
self.partitions = partitions
self.tmp_dir = None
self.child = None
self.running = False
def out(self, message):
log.info("*** Kafka [%s:%d]: %s", self.host, self.port, message)
def open(self):
if self.running:
self.out("Instance already running")
return
self.tmp_dir = tempfile.mkdtemp()
self.out("Running local instance...")
log.info(" host = %s", self.host)
log.info(" port = %s", self.port)
log.info(" broker_id = %s", self.broker_id)
log.info(" zk_host = %s", self.zk_host)
log.info(" zk_port = %s", self.zk_port)
log.info(" zk_chroot = %s", self.zk_chroot)
log.info(" replicas = %s", self.replicas)
log.info(" partitions = %s", self.partitions)
log.info(" tmp_dir = %s", self.tmp_dir)
# Create directories
os.mkdir(os.path.join(self.tmp_dir, "logs"))
os.mkdir(os.path.join(self.tmp_dir, "data"))
# Generate configs
template = self.test_resource("kafka.properties")
properties = os.path.join(self.tmp_dir, "kafka.properties")
self.render_template(template, properties, vars(self))
# Party!
self.out("Creating Zookeeper chroot node...")
args = self.kafka_run_class_args("org.apache.zookeeper.ZooKeeperMain",
"-server", "%s:%d" % (self.zk_host, self.zk_port),
"create",
"/%s" % self.zk_chroot,
"kafka-python")
env = self.kafka_run_class_env()
proc = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if proc.wait() != 0:
self.out("Failed to create Zookeeper chroot node")
self.out(proc.stdout.read())
self.out(proc.stderr.read())
raise RuntimeError("Failed to create Zookeeper chroot node")
self.out("Done!")
self.out("Starting...")
# Configure Kafka child process
args = self.kafka_run_class_args("kafka.Kafka", properties)
env = self.kafka_run_class_env()
timeout = 5
max_timeout = 30
backoff = 1
while True:
self.child = SpawnedService(args, env)
self.child.start()
timeout = min(timeout, max_timeout)
if self.child.wait_for(r"\[Kafka Server %d\], Started" %
self.broker_id, timeout=timeout):
break
self.child.stop()
timeout *= 2
time.sleep(backoff)
self.out("Done!")
self.running = True
def close(self):
if not self.running:
self.out("Instance already stopped")
return
self.out("Stopping...")
self.child.stop()
self.child = None
self.out("Done!")
shutil.rmtree(self.tmp_dir)
self.running = False
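# Usage sketch (illustrative): combine the fixtures for a local test broker.
# Requires the Kafka distribution referenced by KAFKA_ROOT to be present,
# and assumes neither ZOOKEEPER_URI nor KAFKA_URI points at external services.
if __name__ == "__main__":
    zk = ZookeeperFixture.instance()
    kafka = KafkaFixture.instance(0, zk.host, zk.port)
    try:
        log.info("Kafka fixture listening on %s:%d", kafka.host, kafka.port)
    finally:
        kafka.close()
        zk.close()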
|
gamechanger/kafka-python
|
test/fixtures.py
|
Python
|
apache-2.0
| 9,197
|
# Copyright (c) 2012 NetApp, Inc. All rights reserved.
# Copyright (c) 2014 Ben Swartzlander. All rights reserved.
# Copyright (c) 2014 Navneet Singh. All rights reserved.
# Copyright (c) 2014 Clinton Knight. All rights reserved.
# Copyright (c) 2014 Alex Meade. All rights reserved.
# Copyright (c) 2014 Andrew Kerr. All rights reserved.
# Copyright (c) 2014 Jeff Applewhite. All rights reserved.
# Copyright (c) 2015 Tom Barron. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume driver library for NetApp 7/C-mode block storage systems.
"""
import math
import sys
import uuid
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder.volume.drivers.netapp.dataontap.client import api as na_api
from cinder.volume.drivers.netapp import options as na_opts
from cinder.volume.drivers.netapp import utils as na_utils
from cinder.volume import utils as volume_utils
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
class NetAppLun(object):
"""Represents a LUN on NetApp storage."""
def __init__(self, handle, name, size, metadata_dict):
self.handle = handle
self.name = name
self.size = size
self.metadata = metadata_dict or {}
def get_metadata_property(self, prop):
"""Get the metadata property of a LUN."""
if prop in self.metadata:
return self.metadata[prop]
name = self.name
LOG.debug("No metadata property %(prop)s defined for the LUN %(name)s",
{'prop': prop, 'name': name})
def __str__(self, *args, **kwargs):
return 'NetApp LUN [handle:%s, name:%s, size:%s, metadata:%s]' % (
self.handle, self.name, self.size, self.metadata)
class NetAppBlockStorageLibrary(object):
"""NetApp block storage library for Data ONTAP."""
# do not increment this as it may be used in volume type definitions
VERSION = "1.0.0"
REQUIRED_FLAGS = ['netapp_login', 'netapp_password',
'netapp_server_hostname']
ALLOWED_LUN_OS_TYPES = ['linux', 'aix', 'hpux', 'image', 'windows',
'windows_2008', 'windows_gpt', 'solaris',
'solaris_efi', 'netware', 'openvms', 'hyper_v']
ALLOWED_IGROUP_HOST_TYPES = ['linux', 'aix', 'hpux', 'windows', 'solaris',
'netware', 'default', 'vmware', 'openvms',
'xen', 'hyper_v']
DEFAULT_LUN_OS = 'linux'
DEFAULT_HOST_TYPE = 'linux'
def __init__(self, driver_name, driver_protocol, **kwargs):
na_utils.validate_instantiation(**kwargs)
self.driver_name = driver_name
self.driver_protocol = driver_protocol
self.zapi_client = None
self._stats = {}
self.lun_table = {}
self.lun_ostype = None
self.host_type = None
self.lookup_service = fczm_utils.create_lookup_service()
self.app_version = kwargs.get("app_version", "unknown")
self.configuration = kwargs['configuration']
self.configuration.append_config_values(na_opts.netapp_connection_opts)
self.configuration.append_config_values(na_opts.netapp_basicauth_opts)
self.configuration.append_config_values(na_opts.netapp_transport_opts)
self.configuration.append_config_values(
na_opts.netapp_provisioning_opts)
self.configuration.append_config_values(na_opts.netapp_san_opts)
def do_setup(self, context):
na_utils.check_flags(self.REQUIRED_FLAGS, self.configuration)
self.lun_ostype = (self.configuration.netapp_lun_ostype
or self.DEFAULT_LUN_OS)
self.host_type = (self.configuration.netapp_host_type
or self.DEFAULT_HOST_TYPE)
def check_for_setup_error(self):
"""Check that the driver is working and can communicate.
Discovers the LUNs on the NetApp server.
"""
if self.lun_ostype not in self.ALLOWED_LUN_OS_TYPES:
msg = _("Invalid value for NetApp configuration"
" option netapp_lun_ostype.")
LOG.error(msg)
raise exception.NetAppDriverException(msg)
if self.host_type not in self.ALLOWED_IGROUP_HOST_TYPES:
msg = _("Invalid value for NetApp configuration"
" option netapp_host_type.")
LOG.error(msg)
raise exception.NetAppDriverException(msg)
lun_list = self.zapi_client.get_lun_list()
self._extract_and_populate_luns(lun_list)
LOG.debug("Success getting list of LUNs from server.")
def get_pool(self, volume):
"""Return pool name where volume resides.
:param volume: The volume hosted by the driver.
:return: Name of the pool where given volume is hosted.
"""
name = volume['name']
metadata = self._get_lun_attr(name, 'metadata') or dict()
return metadata.get('Volume', None)
def create_volume(self, volume):
"""Driver entry point for creating a new volume (Data ONTAP LUN)."""
LOG.debug('create_volume on %s', volume['host'])
# get Data ONTAP volume name as pool name
pool_name = volume_utils.extract_host(volume['host'], level='pool')
if pool_name is None:
msg = _("Pool is not available in the volume host field.")
raise exception.InvalidHost(reason=msg)
extra_specs = na_utils.get_volume_extra_specs(volume)
lun_name = volume['name']
size = int(volume['size']) * units.Gi
metadata = {'OsType': self.lun_ostype,
'SpaceReserved': 'true',
'Path': '/vol/%s/%s' % (pool_name, lun_name)}
qos_policy_group_info = self._setup_qos_for_volume(volume, extra_specs)
qos_policy_group_name = (
na_utils.get_qos_policy_group_name_from_info(
qos_policy_group_info))
try:
self._create_lun(pool_name, lun_name, size, metadata,
qos_policy_group_name)
except Exception:
LOG.exception(_LE("Exception creating LUN %(name)s in pool "
"%(pool)s."),
{'name': lun_name, 'pool': pool_name})
self._mark_qos_policy_group_for_deletion(qos_policy_group_info)
msg = _("Volume %s could not be created.")
raise exception.VolumeBackendAPIException(data=msg % (
volume['name']))
LOG.debug('Created LUN with name %(name)s and QoS info %(qos)s',
{'name': lun_name, 'qos': qos_policy_group_info})
metadata['Path'] = '/vol/%s/%s' % (pool_name, lun_name)
metadata['Volume'] = pool_name
metadata['Qtree'] = None
handle = self._create_lun_handle(metadata)
self._add_lun_to_table(NetAppLun(handle, lun_name, size, metadata))
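    # For illustration: cinder encodes the pool in the host field, so a
    # volume with host 'controller@netapp_backend#vol_cinder' is created on
    # the Data ONTAP volume 'vol_cinder' (extract_host(..., level='pool')).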
def _setup_qos_for_volume(self, volume, extra_specs):
return None
def _mark_qos_policy_group_for_deletion(self, qos_policy_group_info):
return
def delete_volume(self, volume):
"""Driver entry point for destroying existing volumes."""
name = volume['name']
metadata = self._get_lun_attr(name, 'metadata')
if not metadata:
LOG.warning(_LW("No entry in LUN table for volume/snapshot"
" %(name)s."), {'name': name})
return
self.zapi_client.destroy_lun(metadata['Path'])
self.lun_table.pop(name)
def ensure_export(self, context, volume):
"""Driver entry point to get the export info for an existing volume."""
handle = self._get_lun_attr(volume['name'], 'handle')
return {'provider_location': handle}
def create_export(self, context, volume):
"""Driver entry point to get the export info for a new volume."""
handle = self._get_lun_attr(volume['name'], 'handle')
return {'provider_location': handle}
def remove_export(self, context, volume):
"""Driver entry point to remove an export for a volume.
Since exporting is idempotent in this driver, we have nothing
to do for unexporting.
"""
pass
def create_snapshot(self, snapshot):
"""Driver entry point for creating a snapshot.
This driver implements snapshots by using efficient single-file
(LUN) cloning.
"""
vol_name = snapshot['volume_name']
snapshot_name = snapshot['name']
lun = self._get_lun_from_table(vol_name)
self._clone_lun(lun.name, snapshot_name, space_reserved='false')
def delete_snapshot(self, snapshot):
"""Driver entry point for deleting a snapshot."""
self.delete_volume(snapshot)
LOG.debug("Snapshot %s deletion successful", snapshot['name'])
def create_volume_from_snapshot(self, volume, snapshot):
source = {'name': snapshot['name'], 'size': snapshot['volume_size']}
return self._clone_source_to_destination(source, volume)
def create_cloned_volume(self, volume, src_vref):
src_lun = self._get_lun_from_table(src_vref['name'])
source = {'name': src_lun.name, 'size': src_vref['size']}
return self._clone_source_to_destination(source, volume)
def _clone_source_to_destination(self, source, destination_volume):
source_size = source['size']
destination_size = destination_volume['size']
source_name = source['name']
destination_name = destination_volume['name']
extra_specs = na_utils.get_volume_extra_specs(destination_volume)
qos_policy_group_info = self._setup_qos_for_volume(
destination_volume, extra_specs)
qos_policy_group_name = (
na_utils.get_qos_policy_group_name_from_info(
qos_policy_group_info))
try:
self._clone_lun(source_name, destination_name,
space_reserved='true',
qos_policy_group_name=qos_policy_group_name)
if destination_size != source_size:
try:
self.extend_volume(
destination_volume, destination_size,
qos_policy_group_name=qos_policy_group_name)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(
_LE("Resizing %s failed. Cleaning volume."),
destination_volume['id'])
self.delete_volume(destination_volume)
except Exception:
LOG.exception(_LE("Exception cloning volume %(name)s from source "
"volume %(source)s."),
{'name': destination_name, 'source': source_name})
self._mark_qos_policy_group_for_deletion(qos_policy_group_info)
msg = _("Volume %s could not be created from source volume.")
raise exception.VolumeBackendAPIException(
data=msg % destination_name)
def _create_lun(self, volume_name, lun_name, size,
metadata, qos_policy_group_name=None):
"""Creates a LUN, handling Data ONTAP differences as needed."""
raise NotImplementedError()
def _create_lun_handle(self, metadata):
"""Returns LUN handle based on filer type."""
raise NotImplementedError()
def _extract_lun_info(self, lun):
"""Extracts the LUNs from API and populates the LUN table."""
meta_dict = self._create_lun_meta(lun)
path = lun.get_child_content('path')
(_rest, _splitter, name) = path.rpartition('/')
handle = self._create_lun_handle(meta_dict)
size = lun.get_child_content('size')
return NetAppLun(handle, name, size, meta_dict)
def _extract_and_populate_luns(self, api_luns):
"""Extracts the LUNs from API and populates the LUN table."""
for lun in api_luns:
discovered_lun = self._extract_lun_info(lun)
self._add_lun_to_table(discovered_lun)
def _map_lun(self, name, initiator_list, initiator_type, lun_id=None):
"""Maps LUN to the initiator(s) and returns LUN ID assigned."""
metadata = self._get_lun_attr(name, 'metadata')
path = metadata['Path']
igroup_name, ig_host_os, ig_type = self._get_or_create_igroup(
initiator_list, initiator_type, self.host_type)
if ig_host_os != self.host_type:
LOG.warning(_LW("LUN misalignment may occur for current"
" initiator group %(ig_nm)s) with host OS type"
" %(ig_os)s. Please configure initiator group"
" manually according to the type of the"
" host OS."),
{'ig_nm': igroup_name, 'ig_os': ig_host_os})
try:
return self.zapi_client.map_lun(path, igroup_name, lun_id=lun_id)
except na_api.NaApiError:
exc_info = sys.exc_info()
(_igroup, lun_id) = self._find_mapped_lun_igroup(path,
initiator_list)
if lun_id is not None:
return lun_id
else:
raise exc_info[0], exc_info[1], exc_info[2]
def _unmap_lun(self, path, initiator_list):
"""Unmaps a LUN from given initiator."""
(igroup_name, _lun_id) = self._find_mapped_lun_igroup(path,
initiator_list)
self.zapi_client.unmap_lun(path, igroup_name)
def _find_mapped_lun_igroup(self, path, initiator_list):
"""Find an igroup for a LUN mapped to the given initiator(s)."""
raise NotImplementedError()
def _has_luns_mapped_to_initiators(self, initiator_list):
"""Checks whether any LUNs are mapped to the given initiator(s)."""
return self.zapi_client.has_luns_mapped_to_initiators(initiator_list)
def _get_or_create_igroup(self, initiator_list, initiator_group_type,
host_os_type):
"""Checks for an igroup for a set of one or more initiators.
        Creates an igroup with the given host OS type and igroup type,
        and adds the initiators, if one is not already present.
"""
igroups = self.zapi_client.get_igroup_by_initiators(initiator_list)
igroup_name = None
if igroups:
igroup = igroups[0]
igroup_name = igroup['initiator-group-name']
host_os_type = igroup['initiator-group-os-type']
initiator_group_type = igroup['initiator-group-type']
if not igroup_name:
igroup_name = self._create_igroup_add_initiators(
initiator_group_type, host_os_type, initiator_list)
return igroup_name, host_os_type, initiator_group_type
def _create_igroup_add_initiators(self, initiator_group_type,
host_os_type, initiator_list):
"""Creates igroup and adds initiators."""
igroup_name = na_utils.OPENSTACK_PREFIX + six.text_type(uuid.uuid4())
self.zapi_client.create_igroup(igroup_name, initiator_group_type,
host_os_type)
for initiator in initiator_list:
self.zapi_client.add_igroup_initiator(igroup_name, initiator)
return igroup_name
def _add_lun_to_table(self, lun):
"""Adds LUN to cache table."""
if not isinstance(lun, NetAppLun):
msg = _("Object is not a NetApp LUN.")
raise exception.VolumeBackendAPIException(data=msg)
self.lun_table[lun.name] = lun
def _get_lun_from_table(self, name):
"""Gets LUN from cache table.
Refreshes cache if LUN not found in cache.
"""
lun = self.lun_table.get(name)
if lun is None:
lun_list = self.zapi_client.get_lun_list()
self._extract_and_populate_luns(lun_list)
lun = self.lun_table.get(name)
if lun is None:
raise exception.VolumeNotFound(volume_id=name)
return lun
def _clone_lun(self, name, new_name, space_reserved='true',
qos_policy_group_name=None, src_block=0, dest_block=0,
block_count=0):
"""Clone LUN with the given name to the new name."""
raise NotImplementedError()
def _get_lun_attr(self, name, attr):
"""Get the LUN attribute if found else None."""
try:
attr = getattr(self._get_lun_from_table(name), attr)
return attr
except exception.VolumeNotFound as e:
LOG.error(_LE("Message: %s"), e.msg)
except Exception as e:
LOG.error(_LE("Error getting LUN attribute. Exception: %s"), e)
return None
def _create_lun_meta(self, lun):
raise NotImplementedError()
def _get_fc_target_wwpns(self, include_partner=True):
raise NotImplementedError()
def get_volume_stats(self, refresh=False):
"""Get volume stats.
        If 'refresh' is True, update the stats first.
"""
if refresh:
self._update_volume_stats()
return self._stats
def _update_volume_stats(self):
raise NotImplementedError()
def extend_volume(self, volume, new_size, qos_policy_group_name=None):
"""Extend an existing volume to the new size."""
name = volume['name']
lun = self._get_lun_from_table(name)
path = lun.metadata['Path']
curr_size_bytes = six.text_type(lun.size)
new_size_bytes = six.text_type(int(new_size) * units.Gi)
        # The cached LUN size is reused by clone scenarios,
        # hence the comparison against the stored size.
if curr_size_bytes != new_size_bytes:
lun_geometry = self.zapi_client.get_lun_geometry(path)
if (lun_geometry and lun_geometry.get("max_resize")
and int(lun_geometry.get("max_resize")) >=
int(new_size_bytes)):
self.zapi_client.do_direct_resize(path, new_size_bytes)
else:
self._do_sub_clone_resize(
path, new_size_bytes,
qos_policy_group_name=qos_policy_group_name)
self.lun_table[name].size = new_size_bytes
else:
LOG.info(_LI("No need to extend volume %s"
" as it is already the requested new size."), name)
def _get_vol_option(self, volume_name, option_name):
"""Get the value for the volume option."""
value = None
options = self.zapi_client.get_volume_options(volume_name)
for opt in options:
if opt.get_child_content('name') == option_name:
value = opt.get_child_content('value')
break
return value
def _do_sub_clone_resize(self, path, new_size_bytes,
qos_policy_group_name=None):
"""Does sub LUN clone after verification.
        Clones the block ranges, swaps the LUNs, and deletes the older
        LUN after a successful clone.
"""
seg = path.split("/")
LOG.info(_LI("Resizing LUN %s to new size using clone operation."),
seg[-1])
name = seg[-1]
vol_name = seg[2]
lun = self._get_lun_from_table(name)
metadata = lun.metadata
compression = self._get_vol_option(vol_name, 'compression')
if compression == "on":
            msg = _('%s cannot be resized using clone operation'
                    ' as it is hosted on a compressed volume.')
raise exception.VolumeBackendAPIException(data=msg % name)
else:
block_count = self._get_lun_block_count(path)
if block_count == 0:
msg = _('%s cannot be resized using clone operation'
' as it contains no blocks.')
raise exception.VolumeBackendAPIException(data=msg % name)
new_lun = 'new-%s' % name
self.zapi_client.create_lun(
vol_name, new_lun, new_size_bytes, metadata,
qos_policy_group_name=qos_policy_group_name)
try:
self._clone_lun(name, new_lun, block_count=block_count,
qos_policy_group_name=qos_policy_group_name)
self._post_sub_clone_resize(path)
except Exception:
with excutils.save_and_reraise_exception():
new_path = '/vol/%s/%s' % (vol_name, new_lun)
self.zapi_client.destroy_lun(new_path)
def _post_sub_clone_resize(self, path):
"""Try post sub clone resize in a transactional manner."""
st_tm_mv, st_nw_mv, st_del_old = None, None, None
seg = path.split("/")
LOG.info(_LI("Post clone resize LUN %s"), seg[-1])
new_lun = 'new-%s' % (seg[-1])
tmp_lun = 'tmp-%s' % (seg[-1])
tmp_path = "/vol/%s/%s" % (seg[2], tmp_lun)
new_path = "/vol/%s/%s" % (seg[2], new_lun)
try:
st_tm_mv = self.zapi_client.move_lun(path, tmp_path)
st_nw_mv = self.zapi_client.move_lun(new_path, path)
st_del_old = self.zapi_client.destroy_lun(tmp_path)
except Exception as e:
if st_tm_mv is None:
msg = _("Failure staging LUN %s to tmp.")
raise exception.VolumeBackendAPIException(data=msg % (seg[-1]))
else:
if st_nw_mv is None:
self.zapi_client.move_lun(tmp_path, path)
msg = _("Failure moving new cloned LUN to %s.")
raise exception.VolumeBackendAPIException(
data=msg % (seg[-1]))
elif st_del_old is None:
LOG.error(_LE("Failure deleting staged tmp LUN %s."),
tmp_lun)
else:
LOG.error(_LE("Unknown exception in"
" post clone resize LUN %s."), seg[-1])
LOG.error(_LE("Exception details: %s"), e)
def _get_lun_block_count(self, path):
"""Gets block counts for the LUN."""
LOG.debug("Getting LUN block count.")
lun_infos = self.zapi_client.get_lun_by_args(path=path)
if not lun_infos:
seg = path.split('/')
msg = _('Failure getting LUN info for %s.')
raise exception.VolumeBackendAPIException(data=msg % seg[-1])
lun_info = lun_infos[-1]
bs = int(lun_info.get_child_content('block-size'))
ls = int(lun_info.get_child_content('size'))
block_count = ls / bs
return block_count
def _check_volume_type_for_lun(self, volume, lun, existing_ref,
extra_specs):
"""Checks if lun satifies the volume type."""
raise NotImplementedError()
def manage_existing(self, volume, existing_ref):
"""Brings an existing storage object under Cinder management.
existing_ref can contain source-id or source-name or both.
source-id: lun uuid.
source-name: complete lun path eg. /vol/vol0/lun.
"""
lun = self._get_existing_vol_with_manage_ref(existing_ref)
extra_specs = na_utils.get_volume_extra_specs(volume)
self._check_volume_type_for_lun(volume, lun, existing_ref, extra_specs)
qos_policy_group_info = self._setup_qos_for_volume(volume, extra_specs)
qos_policy_group_name = (
na_utils.get_qos_policy_group_name_from_info(
qos_policy_group_info))
path = lun.get_metadata_property('Path')
if lun.name == volume['name']:
new_path = path
LOG.info(_LI("LUN with given ref %s need not be renamed "
"during manage operation."), existing_ref)
else:
(rest, splitter, name) = path.rpartition('/')
new_path = '%s/%s' % (rest, volume['name'])
self.zapi_client.move_lun(path, new_path)
lun = self._get_existing_vol_with_manage_ref(
{'source-name': new_path})
if qos_policy_group_name is not None:
self.zapi_client.set_lun_qos_policy_group(new_path,
qos_policy_group_name)
self._add_lun_to_table(lun)
LOG.info(_LI("Manage operation completed for LUN with new path"
" %(path)s and uuid %(uuid)s."),
{'path': lun.get_metadata_property('Path'),
'uuid': lun.get_metadata_property('UUID')})
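    # Hypothetical usage sketch (not part of the driver): the two
    # reference forms accepted by manage_existing, per the docstring
    # above; the volume dict and LUN path are placeholders.
    #   driver.manage_existing({'name': 'vol-new'},
    #                          {'source-name': '/vol/vol0/lun1'})
    #   driver.manage_existing({'name': 'vol-new'},
    #                          {'source-id': '<lun-uuid>'})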
def manage_existing_get_size(self, volume, existing_ref):
"""Return size of volume to be managed by manage_existing.
When calculating the size, round up to the next GB.
"""
lun = self._get_existing_vol_with_manage_ref(existing_ref)
return int(math.ceil(float(lun.size) / units.Gi))
def _get_existing_vol_with_manage_ref(self, existing_ref):
"""Get the corresponding LUN from the storage server."""
uuid = existing_ref.get('source-id')
path = existing_ref.get('source-name')
if not (uuid or path):
reason = _('Reference must contain either source-id'
' or source-name element.')
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref, reason=reason)
lun_info = {}
lun_info.setdefault('path', path if path else None)
if hasattr(self, 'vserver') and uuid:
lun_info['uuid'] = uuid
luns = self.zapi_client.get_lun_by_args(**lun_info)
if luns:
for lun in luns:
netapp_lun = self._extract_lun_info(lun)
storage_valid = self._is_lun_valid_on_storage(netapp_lun)
                uuid_valid = (not uuid or
                              netapp_lun.get_metadata_property('UUID') == uuid)
if storage_valid and uuid_valid:
return netapp_lun
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref,
reason=(_('LUN not found with given ref %s.') % existing_ref))
def _is_lun_valid_on_storage(self, lun):
"""Validate lun specific to storage system."""
return True
def unmanage(self, volume):
"""Removes the specified volume from Cinder management.
Does not delete the underlying backend storage object.
"""
managed_lun = self._get_lun_from_table(volume['name'])
LOG.info(_LI("Unmanaged LUN with current path %(path)s and uuid "
"%(uuid)s."),
{'path': managed_lun.get_metadata_property('Path'),
'uuid': managed_lun.get_metadata_property('UUID')
or 'unknown'})
def initialize_connection_iscsi(self, volume, connector):
"""Driver entry point to attach a volume to an instance.
Do the LUN masking on the storage system so the initiator can access
the LUN on the target. Also return the iSCSI properties so the
initiator can find the LUN. This implementation does not call
        _get_iscsi_properties() to get the properties because we cannot store the
LUN number in the database. We only find out what the LUN number will
be during this method call so we construct the properties dictionary
ourselves.
"""
initiator_name = connector['initiator']
name = volume['name']
lun_id = self._map_lun(name, [initiator_name], 'iscsi', None)
LOG.debug("Mapped LUN %(name)s to the initiator %(initiator_name)s",
{'name': name, 'initiator_name': initiator_name})
target_list = self.zapi_client.get_iscsi_target_details()
if not target_list:
raise exception.VolumeBackendAPIException(
data=_('Failed to get LUN target list for the LUN %s') % name)
LOG.debug("Successfully fetched target list for LUN %(name)s and "
"initiator %(initiator_name)s",
{'name': name, 'initiator_name': initiator_name})
preferred_target = self._get_preferred_target_from_list(
target_list)
if preferred_target is None:
msg = _('Failed to get target portal for the LUN %s')
raise exception.VolumeBackendAPIException(data=msg % name)
(address, port) = (preferred_target['address'],
preferred_target['port'])
iqn = self.zapi_client.get_iscsi_service_details()
if not iqn:
msg = _('Failed to get target IQN for the LUN %s')
raise exception.VolumeBackendAPIException(data=msg % name)
properties = na_utils.get_iscsi_connection_properties(lun_id, volume,
iqn, address,
port)
return properties
def _get_preferred_target_from_list(self, target_details_list,
filter=None):
preferred_target = None
for target in target_details_list:
if filter and target['address'] not in filter:
continue
if target.get('interface-enabled', 'true') == 'true':
preferred_target = target
break
if preferred_target is None and len(target_details_list) > 0:
preferred_target = target_details_list[0]
return preferred_target
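    # Selection-rule sketch (hypothetical data, not part of the driver):
    # an interface-enabled target is preferred over an earlier disabled
    # one; if no target is enabled, the first entry wins.
    #   targets = [{'address': 'a', 'interface-enabled': 'false'},
    #              {'address': 'b', 'interface-enabled': 'true'}]
    #   _get_preferred_target_from_list(targets)  # -> the 'b' entry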
def terminate_connection_iscsi(self, volume, connector, **kwargs):
"""Driver entry point to unattach a volume from an instance.
Unmask the LUN on the storage system so the given initiator can no
longer access it.
"""
initiator_name = connector['initiator']
name = volume['name']
metadata = self._get_lun_attr(name, 'metadata')
path = metadata['Path']
self._unmap_lun(path, [initiator_name])
LOG.debug("Unmapped LUN %(name)s from the initiator "
"%(initiator_name)s",
{'name': name, 'initiator_name': initiator_name})
def initialize_connection_fc(self, volume, connector):
"""Initializes the connection and returns connection info.
Assign any created volume to a compute node/host so that it can be
used from that host.
The driver returns a driver_volume_type of 'fibre_channel'.
The target_wwn can be a single entry or a list of wwns that
correspond to the list of remote wwn(s) that will export the volume.
Example return values:
{
'driver_volume_type': 'fibre_channel'
'data': {
'target_discovered': True,
'target_lun': 1,
'target_wwn': '500a098280feeba5',
'access_mode': 'rw',
'initiator_target_map': {
'21000024ff406cc3': ['500a098280feeba5'],
'21000024ff406cc2': ['500a098280feeba5']
}
}
}
or
{
'driver_volume_type': 'fibre_channel'
'data': {
'target_discovered': True,
'target_lun': 1,
'target_wwn': ['500a098280feeba5', '500a098290feeba5',
'500a098190feeba5', '500a098180feeba5'],
'access_mode': 'rw',
'initiator_target_map': {
'21000024ff406cc3': ['500a098280feeba5',
'500a098290feeba5'],
'21000024ff406cc2': ['500a098190feeba5',
'500a098180feeba5']
}
}
}
"""
initiators = [fczm_utils.get_formatted_wwn(wwpn)
for wwpn in connector['wwpns']]
volume_name = volume['name']
lun_id = self._map_lun(volume_name, initiators, 'fcp', None)
LOG.debug("Mapped LUN %(name)s to the initiator(s) %(initiators)s",
{'name': volume_name, 'initiators': initiators})
target_wwpns, initiator_target_map, num_paths = (
self._build_initiator_target_map(connector))
if target_wwpns:
LOG.debug("Successfully fetched target details for LUN %(name)s "
"and initiator(s) %(initiators)s",
{'name': volume_name, 'initiators': initiators})
else:
raise exception.VolumeBackendAPIException(
data=_('Failed to get LUN target details for '
'the LUN %s') % volume_name)
target_info = {'driver_volume_type': 'fibre_channel',
'data': {'target_discovered': True,
'target_lun': int(lun_id),
'target_wwn': target_wwpns,
'access_mode': 'rw',
'initiator_target_map': initiator_target_map}}
return target_info
def terminate_connection_fc(self, volume, connector, **kwargs):
"""Disallow connection from connector.
Return empty data if other volumes are in the same zone.
The FibreChannel ZoneManager doesn't remove zones
if there isn't an initiator_target_map in the
return of terminate_connection.
:returns: data - the target_wwns and initiator_target_map if the
zone is to be removed, otherwise the same map with
an empty dict for the 'data' key
"""
initiators = [fczm_utils.get_formatted_wwn(wwpn)
for wwpn in connector['wwpns']]
name = volume['name']
metadata = self._get_lun_attr(name, 'metadata')
path = metadata['Path']
self._unmap_lun(path, initiators)
LOG.debug("Unmapped LUN %(name)s from the initiator %(initiators)s",
{'name': name, 'initiators': initiators})
info = {'driver_volume_type': 'fibre_channel',
'data': {}}
if not self._has_luns_mapped_to_initiators(initiators):
# No more exports for this host, so tear down zone.
LOG.info(_LI("Need to remove FC Zone, building initiator "
"target map"))
target_wwpns, initiator_target_map, num_paths = (
self._build_initiator_target_map(connector))
info['data'] = {'target_wwn': target_wwpns,
'initiator_target_map': initiator_target_map}
return info
def _build_initiator_target_map(self, connector):
"""Build the target_wwns and the initiator target map."""
# get WWPNs from controller and strip colons
all_target_wwpns = self._get_fc_target_wwpns()
all_target_wwpns = [six.text_type(wwpn).replace(':', '')
for wwpn in all_target_wwpns]
target_wwpns = []
init_targ_map = {}
num_paths = 0
if self.lookup_service is not None:
# Use FC SAN lookup to determine which ports are visible.
dev_map = self.lookup_service.get_device_mapping_from_network(
connector['wwpns'],
all_target_wwpns)
for fabric_name in dev_map:
fabric = dev_map[fabric_name]
target_wwpns += fabric['target_port_wwn_list']
for initiator in fabric['initiator_port_wwn_list']:
if initiator not in init_targ_map:
init_targ_map[initiator] = []
init_targ_map[initiator] += fabric['target_port_wwn_list']
init_targ_map[initiator] = list(set(
init_targ_map[initiator]))
for target in init_targ_map[initiator]:
num_paths += 1
target_wwpns = list(set(target_wwpns))
else:
initiator_wwns = connector['wwpns']
target_wwpns = all_target_wwpns
for initiator in initiator_wwns:
init_targ_map[initiator] = target_wwpns
return target_wwpns, init_targ_map, num_paths
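    # Return-shape sketch (hypothetical WWPNs, mirroring the docstring
    # examples in initialize_connection_fc above):
    #   target_wwpns  -> ['500a098280feeba5', ...]
    #   init_targ_map -> {'21000024ff406cc3': ['500a098280feeba5', ...]}
    #   num_paths     -> total number of initiator/target pairs counted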
|
julianwang/cinder
|
cinder/volume/drivers/netapp/dataontap/block_base.py
|
Python
|
apache-2.0
| 37,442
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=W0613,no-member,attribute-defined-outside-init
"""
Some "standard" instruments to collect additional info about workload execution.
.. note:: The run() method of a Workload may perform some "boilerplate" as well as
the actual execution of the workload (e.g. it may contain UI automation
needed to start the workload). This "boilerplate" execution will also
          be measured by these instruments. As such, they are not suitable for
          collecting precise data about specific operations.
"""
import os
import re
import logging
import time
import tarfile
from itertools import izip, izip_longest
from subprocess import CalledProcessError
from wlauto import Instrument, Parameter
from wlauto.core import signal
from wlauto.exceptions import DeviceError, ConfigError
from wlauto.utils.misc import diff_tokens, write_table, check_output, as_relative
from wlauto.utils.misc import ensure_file_directory_exists as _f
from wlauto.utils.misc import ensure_directory_exists as _d
from wlauto.utils.android import ApkInfo
from wlauto.utils.types import list_of_strings
logger = logging.getLogger(__name__)
class SysfsExtractor(Instrument):
name = 'sysfs_extractor'
description = """
    Collects the contents of a set of directories before and after workload execution
and diffs the result.
"""
mount_command = 'mount -t tmpfs -o size={} tmpfs {}'
extract_timeout = 30
tarname = 'sysfs.tar'
DEVICE_PATH = 0
BEFORE_PATH = 1
AFTER_PATH = 2
DIFF_PATH = 3
parameters = [
Parameter('paths', kind=list_of_strings, mandatory=True,
description="""A list of paths to be pulled from the device. These could be directories
as well as files.""",
global_alias='sysfs_extract_dirs'),
Parameter('use_tmpfs', kind=bool, default=None,
description="""
Specifies whether tmpfs should be used to cache sysfile trees and then pull them down
                  as a tarball. This is significantly faster than just copying the directory trees from
                  the device directly, but requires root and may not work on all devices. Defaults to
``True`` if the device is rooted and ``False`` if it is not.
"""),
Parameter('tmpfs_mount_point', default=None,
description="""Mount point for tmpfs partition used to store snapshots of paths."""),
Parameter('tmpfs_size', default='32m',
description="""Size of the tempfs partition."""),
]
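    # Hypothetical agenda snippet enabling this instrument (comment-only
    # illustration; only the parameter names above are taken from the code):
    #   instrumentation: [sysfs_extractor]
    #   sysfs_extractor:
    #       paths: [/proc/sys/kernel, /sys/kernel/debug/sched_features]
    #       use_tmpfs: true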
def initialize(self, context):
if not self.device.is_rooted and self.use_tmpfs: # pylint: disable=access-member-before-definition
            raise ConfigError('use_tmpfs must be False for an unrooted device.')
elif self.use_tmpfs is None: # pylint: disable=access-member-before-definition
self.use_tmpfs = self.device.is_rooted
if self.use_tmpfs:
self.on_device_before = self.device.path.join(self.tmpfs_mount_point, 'before')
self.on_device_after = self.device.path.join(self.tmpfs_mount_point, 'after')
if not self.device.file_exists(self.tmpfs_mount_point):
self.device.execute('mkdir -p {}'.format(self.tmpfs_mount_point), as_root=True)
self.device.execute(self.mount_command.format(self.tmpfs_size, self.tmpfs_mount_point),
as_root=True)
def setup(self, context):
before_dirs = [
_d(os.path.join(context.output_directory, 'before', self._local_dir(d)))
for d in self.paths
]
after_dirs = [
_d(os.path.join(context.output_directory, 'after', self._local_dir(d)))
for d in self.paths
]
diff_dirs = [
_d(os.path.join(context.output_directory, 'diff', self._local_dir(d)))
for d in self.paths
]
self.device_and_host_paths = zip(self.paths, before_dirs, after_dirs, diff_dirs)
if self.use_tmpfs:
for d in self.paths:
before_dir = self.device.path.join(self.on_device_before,
self.device.path.dirname(as_relative(d)))
after_dir = self.device.path.join(self.on_device_after,
self.device.path.dirname(as_relative(d)))
if self.device.file_exists(before_dir):
self.device.execute('rm -rf {}'.format(before_dir), as_root=True)
self.device.execute('mkdir -p {}'.format(before_dir), as_root=True)
if self.device.file_exists(after_dir):
self.device.execute('rm -rf {}'.format(after_dir), as_root=True)
self.device.execute('mkdir -p {}'.format(after_dir), as_root=True)
def slow_start(self, context):
if self.use_tmpfs:
for d in self.paths:
dest_dir = self.device.path.join(self.on_device_before, as_relative(d))
if '*' in dest_dir:
dest_dir = self.device.path.dirname(dest_dir)
self.device.execute('{} cp -Hr {} {}'.format(self.device.busybox, d, dest_dir),
as_root=True, check_exit_code=False)
else: # not rooted
for dev_dir, before_dir, _, _ in self.device_and_host_paths:
self.device.pull_file(dev_dir, before_dir)
def slow_stop(self, context):
if self.use_tmpfs:
for d in self.paths:
dest_dir = self.device.path.join(self.on_device_after, as_relative(d))
if '*' in dest_dir:
dest_dir = self.device.path.dirname(dest_dir)
self.device.execute('{} cp -Hr {} {}'.format(self.device.busybox, d, dest_dir),
as_root=True, check_exit_code=False)
else: # not using tmpfs
for dev_dir, _, after_dir, _ in self.device_and_host_paths:
self.device.pull_file(dev_dir, after_dir)
def update_result(self, context):
if self.use_tmpfs:
on_device_tarball = self.device.path.join(self.device.working_directory, self.tarname)
            on_host_tarball = os.path.join(context.output_directory, self.tarname + ".gz")
self.device.execute('{} tar cf {} -C {} .'.format(self.device.busybox,
on_device_tarball,
self.tmpfs_mount_point),
as_root=True)
self.device.execute('chmod 0777 {}'.format(on_device_tarball), as_root=True)
self.device.execute('{} gzip {}'.format(self.device.busybox,
on_device_tarball))
self.device.pull_file(on_device_tarball + ".gz", on_host_tarball)
with tarfile.open(on_host_tarball, 'r:gz') as tf:
tf.extractall(context.output_directory)
self.device.delete_file(on_device_tarball + ".gz")
os.remove(on_host_tarball)
for paths in self.device_and_host_paths:
after_dir = paths[self.AFTER_PATH]
dev_dir = paths[self.DEVICE_PATH].strip('*') # remove potential trailing '*'
if (not os.listdir(after_dir) and
self.device.file_exists(dev_dir) and
self.device.listdir(dev_dir)):
self.logger.error('sysfs files were not pulled from the device.')
self.device_and_host_paths.remove(paths) # Path is removed to skip diffing it
for _, before_dir, after_dir, diff_dir in self.device_and_host_paths:
_diff_sysfs_dirs(before_dir, after_dir, diff_dir)
def teardown(self, context):
self._one_time_setup_done = []
def finalize(self, context):
if self.use_tmpfs:
try:
self.device.execute('umount {}'.format(self.tmpfs_mount_point), as_root=True)
except (DeviceError, CalledProcessError):
                # Assume it is a directory but not a mount point.
pass
self.device.execute('rm -rf {}'.format(self.tmpfs_mount_point),
as_root=True, check_exit_code=False)
def validate(self):
if not self.tmpfs_mount_point: # pylint: disable=access-member-before-definition
self.tmpfs_mount_point = self.device.path.join(self.device.working_directory, 'temp-fs')
def _local_dir(self, directory):
return os.path.dirname(as_relative(directory).replace(self.device.path.sep, os.sep))
class ExecutionTimeInstrument(Instrument):
name = 'execution_time'
description = """
Measure how long it took to execute the run() methods of a Workload.
"""
priority = 15
def __init__(self, device, **kwargs):
super(ExecutionTimeInstrument, self).__init__(device, **kwargs)
self.start_time = None
self.end_time = None
def on_run_start(self, context):
signal.connect(self.get_start_time, signal.BEFORE_WORKLOAD_EXECUTION, priority=self.priority)
signal.connect(self.get_stop_time, signal.AFTER_WORKLOAD_EXECUTION, priority=self.priority)
def get_start_time(self, context):
self.start_time = time.time()
def get_stop_time(self, context):
self.end_time = time.time()
def update_result(self, context):
execution_time = self.end_time - self.start_time
context.result.add_metric('execution_time', execution_time, 'seconds')
class ApkVersion(Instrument):
name = 'apk_version'
description = """
(DEPRECATED) Extracts APK versions for workloads that have them.
This instrument is deprecated and should not be used. It will be removed in
future versions of Workload Automation.
Versions of Android packages are now automatically attached to the results as
"apk_version" classfiers.
"""
def __init__(self, device, **kwargs):
super(ApkVersion, self).__init__(device, **kwargs)
self.apk_info = None
def setup(self, context):
if hasattr(context.workload, 'apk_file'):
self.apk_info = ApkInfo(context.workload.apk_file)
else:
self.apk_info = None
def update_result(self, context):
if self.apk_info:
context.result.add_metric(self.name, self.apk_info.version_name)
class InterruptStatsInstrument(Instrument):
name = 'interrupts'
description = """
Pulls the ``/proc/interrupts`` file before and after workload execution and diffs them
to show what interrupts occurred during that time.
"""
def __init__(self, device, **kwargs):
super(InterruptStatsInstrument, self).__init__(device, **kwargs)
self.before_file = None
self.after_file = None
self.diff_file = None
def setup(self, context):
self.before_file = os.path.join(context.output_directory, 'before', 'proc', 'interrupts')
self.after_file = os.path.join(context.output_directory, 'after', 'proc', 'interrupts')
self.diff_file = os.path.join(context.output_directory, 'diff', 'proc', 'interrupts')
def start(self, context):
with open(_f(self.before_file), 'w') as wfh:
wfh.write(self.device.execute('cat /proc/interrupts'))
def stop(self, context):
with open(_f(self.after_file), 'w') as wfh:
wfh.write(self.device.execute('cat /proc/interrupts'))
def update_result(self, context):
# If workload execution failed, the after_file may not have been created.
if os.path.isfile(self.after_file):
_diff_interrupt_files(self.before_file, self.after_file, _f(self.diff_file))
class DynamicFrequencyInstrument(SysfsExtractor):
name = 'cpufreq'
description = """
Collects dynamic frequency (DVFS) settings before and after workload execution.
"""
tarname = 'cpufreq.tar'
parameters = [
Parameter('paths', mandatory=False, override=True),
]
def setup(self, context):
self.paths = ['/sys/devices/system/cpu']
if self.use_tmpfs:
self.paths.append('/sys/class/devfreq/*') # the '*' would cause problems for adb pull.
super(DynamicFrequencyInstrument, self).setup(context)
def validate(self):
# temp-fs would have been set in super's validate, if not explicitly specified.
if not self.tmpfs_mount_point.endswith('-cpufreq'): # pylint: disable=access-member-before-definition
self.tmpfs_mount_point += '-cpufreq'
def _diff_interrupt_files(before, after, result): # pylint: disable=R0914
output_lines = []
with open(before) as bfh:
with open(after) as ofh:
for bline, aline in izip(bfh, ofh):
bchunks = bline.strip().split()
while True:
achunks = aline.strip().split()
if achunks[0] == bchunks[0]:
diffchunks = ['']
diffchunks.append(achunks[0])
diffchunks.extend([diff_tokens(b, a) for b, a
in zip(bchunks[1:], achunks[1:])])
output_lines.append(diffchunks)
break
else: # new category appeared in the after file
diffchunks = ['>'] + achunks
output_lines.append(diffchunks)
try:
aline = ofh.next()
except StopIteration:
break
# Offset heading columns by one to allow for row labels on subsequent
# lines.
output_lines[0].insert(0, '')
# Any "columns" that do not have headings in the first row are not actually
    # columns -- they are a single column where space-separated words got
# split. Merge them back together to prevent them from being
# column-aligned by write_table.
table_rows = [output_lines[0]]
num_cols = len(output_lines[0])
for row in output_lines[1:]:
table_row = row[:num_cols]
table_row.append(' '.join(row[num_cols:]))
table_rows.append(table_row)
with open(result, 'w') as wfh:
write_table(table_rows, wfh)
def _diff_sysfs_dirs(before, after, result): # pylint: disable=R0914
before_files = []
os.path.walk(before,
lambda arg, dirname, names: arg.extend([os.path.join(dirname, f) for f in names]),
before_files
)
before_files = filter(os.path.isfile, before_files)
files = [os.path.relpath(f, before) for f in before_files]
after_files = [os.path.join(after, f) for f in files]
diff_files = [os.path.join(result, f) for f in files]
for bfile, afile, dfile in zip(before_files, after_files, diff_files):
if not os.path.isfile(afile):
logger.debug('sysfs_diff: {} does not exist or is not a file'.format(afile))
continue
with open(bfile) as bfh, open(afile) as afh: # pylint: disable=C0321
with open(_f(dfile), 'w') as dfh:
for i, (bline, aline) in enumerate(izip_longest(bfh, afh), 1):
if aline is None:
logger.debug('Lines missing from {}'.format(afile))
break
bchunks = re.split(r'(\W+)', bline)
achunks = re.split(r'(\W+)', aline)
if len(bchunks) != len(achunks):
logger.debug('Token length mismatch in {} on line {}'.format(bfile, i))
dfh.write('xxx ' + bline)
continue
if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2) and
(bchunks[0] == achunks[0])):
# if there are only two columns and the first column is the
# same, assume it's a "header" column and do not diff it.
dchunks = [bchunks[0]] + [diff_tokens(b, a) for b, a in zip(bchunks[1:], achunks[1:])]
else:
dchunks = [diff_tokens(b, a) for b, a in zip(bchunks, achunks)]
dfh.write(''.join(dchunks))
|
chase-qi/workload-automation
|
wlauto/instrumentation/misc/__init__.py
|
Python
|
apache-2.0
| 17,103
|
# Copyright 2021 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for no_pivot_ldl."""
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.experimental.linalg.no_pivot_ldl import no_pivot_ldl
from tensorflow_probability.python.experimental.linalg.no_pivot_ldl import simple_robustified_cholesky
from tensorflow_probability.python.internal import test_util
@test_util.test_all_tf_execution_regimes
class NoPivotLDLTest(test_util.TestCase):
def _randomDiag(self, n, batch_shape, low, high, forcemin=None, seed=42):
np.random.seed(seed)
shape = batch_shape + [n]
diag = np.random.uniform(low, high, size=shape)
if forcemin:
assert forcemin < low
diag = np.where(diag == np.min(diag, axis=-1)[..., np.newaxis],
forcemin, diag)
return diag
def _randomTril(self, n, batch_shape, seed=42):
np.random.seed(seed)
unit_tril = np.random.standard_normal(batch_shape + [n, n])
unit_tril = np.tril(unit_tril)
unit_tril[..., range(n), range(n)] = 1.
return unit_tril
def _randomSymmetricMatrix(self, n, batch_shape, low, high,
forcemin=None, seed=42):
diag = self._randomDiag(n, batch_shape, low, high, forcemin, seed)
unit_tril = self._randomTril(n, batch_shape, seed)
return np.einsum('...ij,...j,...kj->...ik', unit_tril, diag, unit_tril)
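  # Note (descriptive, not in the original source): the einsum above builds
  # unit_tril @ diag(d) @ unit_tril.T per batch element, i.e. a symmetric
  # matrix with a known L*D*L^T factorization for the tests below.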
def testLDLRandomPSD(self):
matrix = self._randomSymmetricMatrix(
10, [2, 1, 3], 1e-6, 10., forcemin=0., seed=42)
left, diag = self.evaluate(no_pivot_ldl(matrix))
reconstruct = np.einsum('...ij,...j,...kj->...ik', left, diag, left)
self.assertAllClose(matrix, reconstruct)
def testLDLIndefinite(self):
matrix = [[1., 2.], [2., 1.]]
left, diag = self.evaluate(no_pivot_ldl(matrix))
reconstruct = np.einsum('...ij,...j,...kj->...ik', left, diag, left)
self.assertAllClose(matrix, reconstruct)
def testSimpleIsCholeskyRandomPD(self):
matrix = self._randomSymmetricMatrix(10, [2, 1, 3], 1e-6, 10., seed=42)
chol, left = self.evaluate(
(tf.linalg.cholesky(matrix),
simple_robustified_cholesky(matrix)))
self.assertAllClose(chol, left)
def testSimpleIndefinite(self):
matrix = [[1., 2.], [2., 1.]]
left = self.evaluate(
simple_robustified_cholesky(matrix, tol=.1))
reconstruct = np.einsum('...ij,...kj->...ik', left, left)
eigv, _ = self.evaluate(tf.linalg.eigh(reconstruct))
self.assertAllTrue(eigv > 0.)
def testXlaCompileBug(self):
inp = tf.Variable([[2., 1.], [1., 2.]])
self.evaluate(inp.initializer)
alt_chol = simple_robustified_cholesky
alt_chol_nojit = tf.function(alt_chol, autograph=False, jit_compile=False)
alt_chol_jit = tf.function(alt_chol, autograph=False, jit_compile=True)
answer = np.array([[1.4142135, 0.], [0.70710677, 1.2247449]])
self.assertAllClose(self.evaluate(alt_chol(inp)), answer)
self.assertAllClose(self.evaluate(alt_chol_nojit(inp)), answer)
self.assertAllClose(self.evaluate(alt_chol_jit(inp)), answer)
with tf.GradientTape():
chol_with_grad = alt_chol(inp)
chol_nojit_with_grad = alt_chol_nojit(inp)
# Not supported by TF-XLA (WAI), see b/193584244
# chol_jit_with_grad = alt_chol_jit(inp)
self.assertAllClose(self.evaluate(chol_with_grad), answer)
self.assertAllClose(self.evaluate(chol_nojit_with_grad), answer)
# But wrapping the tape in tf.function should work.
@tf.function(autograph=False, jit_compile=True)
def jit_with_grad(mat):
with tf.GradientTape():
return alt_chol_jit(mat)
self.assertAllClose(self.evaluate(jit_with_grad(inp)), answer)
if __name__ == '__main__':
test_util.main()
|
tensorflow/probability
|
tensorflow_probability/python/experimental/linalg/no_pivot_ldl_test.py
|
Python
|
apache-2.0
| 4,358
|
# Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
import time
from catkin_pkg.package import InvalidPackage
from catkin_tools.argument_parsing import add_context_args
from catkin_tools.argument_parsing import add_cmake_and_make_and_catkin_make_args
from catkin_tools.common import format_time_delta
from catkin_tools.common import is_tty
from catkin_tools.common import log
from catkin_tools.common import find_enclosing_package
from catkin_tools.context import Context
from catkin_tools.terminal_color import set_color
from catkin_tools.metadata import get_metadata
from catkin_tools.metadata import update_metadata
from catkin_tools.resultspace import load_resultspace_environment
from .color import clr
from .common import get_build_type
from .build import build_isolated_workspace
from .build import determine_packages_to_be_built
from .build import topological_order_packages
from .build import verify_start_with_option
def prepare_arguments(parser):
parser.description = "Build one or more packages in a catkin workspace.\
This invokes `CMake`, `make`, and optionally `make install` for either all\
or the specified packages in a catkin workspace.\
\
Arguments passed to this verb can temporarily override persistent options\
stored in the catkin profile config. If you want to save these options, use\
the --save-config argument. To see the current config, use the\
`catkin config` command."
# Workspace / profile args
add_context_args(parser)
# Sub-commands
add = parser.add_argument
add('--dry-run', '-d', action='store_true', default=False,
help='List the packages which will be built with the given arguments without building them.')
# What packages to build
pkg_group = parser.add_argument_group('Packages', 'Control which packages get built.')
add = pkg_group.add_argument
add('packages', metavar='PKGNAME', nargs='*',
help='Workspace packages to build, package dependencies are built as well unless --no-deps is used. '
'If no packages are given, then all the packages are built.')
add('--this', dest='build_this', action='store_true', default=False,
help='Build the package containing the current working directory.')
add('--no-deps', action='store_true', default=False,
help='Only build specified packages, not their dependencies.')
start_with_group = pkg_group.add_mutually_exclusive_group()
add = start_with_group.add_argument
add('--start-with', metavar='PKGNAME', type=str,
help='Build a given package and those which depend on it, skipping any before it.')
add('--start-with-this', action='store_true', default=False,
help='Similar to --start-with, starting with the package containing the current directory.')
# Build options
    build_group = parser.add_argument_group('Build', 'Control the build behavior.')
add = build_group.add_argument
add('--force-cmake', action='store_true', default=None,
help='Runs cmake explicitly for each catkin package.')
add('--no-install-lock', action='store_true', default=None,
        help='Prevents serialization of the install steps, which is on by default to prevent file install collisions.')
config_group = parser.add_argument_group('Config', 'Parameters for the underlying buildsystem.')
add = config_group.add_argument
add('--save-config', action='store_true', default=False,
help='Save any configuration options in this section for the next build invocation.')
add_cmake_and_make_and_catkin_make_args(config_group)
# Behavior
behavior_group = parser.add_argument_group('Interface', 'The behavior of the command-line interface.')
add = behavior_group.add_argument
add('--force-color', action='store_true', default=False,
        help='Forces catkin build to output in color, even when the terminal does not appear to support it.')
add('--verbose', '-v', action='store_true', default=False,
help='Print output from commands in ordered blocks once the command finishes.')
add('--interleave-output', '-i', action='store_true', default=False,
help='Prevents ordering of command output when multiple commands are running at the same time.')
add('--no-status', action='store_true', default=False,
help='Suppresses status line, useful in situations where carriage return is not properly supported.')
add('--no-notify', action='store_true', default=False,
help='Suppresses system popup notification.')
return parser
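# Illustrative invocations handled by the options above (package names are
# hypothetical):
#   catkin build --dry-run my_pkg
#   catkin build --this --no-deps
#   catkin build --start-with my_pkg --force-cmake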
def dry_run(context, packages, no_deps, start_with):
# Print Summary
log(context.summary())
# Find list of packages in the workspace
packages_to_be_built, packages_to_be_built_deps, all_packages = determine_packages_to_be_built(packages, context)
# Assert start_with package is in the workspace
verify_start_with_option(start_with, packages, all_packages, packages_to_be_built + packages_to_be_built_deps)
if not no_deps:
# Extend packages to be built to include their deps
packages_to_be_built.extend(packages_to_be_built_deps)
# Also resort
packages_to_be_built = topological_order_packages(dict(packages_to_be_built))
# Print packages
log("Packages to be built:")
max_name_len = str(max([len(pkg.name) for pth, pkg in packages_to_be_built]))
prefix = clr('@{pf}' + ('------ ' if start_with else '- ') + '@|')
for pkg_path, pkg in packages_to_be_built:
build_type = get_build_type(pkg)
if build_type == 'catkin' and 'metapackage' in [e.tagname for e in pkg.exports]:
build_type = 'metapackage'
if start_with and pkg.name == start_with:
start_with = None
log(clr("{prefix}@{cf}{name:<" + max_name_len + "}@| (@{yf}{build_type}@|)")
.format(prefix=clr('@!@{kf}(skip)@| ') if start_with else prefix, name=pkg.name, build_type=build_type))
log("Total packages: " + str(len(packages_to_be_built)))
def main(opts):
# Context-aware args
if opts.build_this or opts.start_with_this:
# Determine the enclosing package
try:
this_package = find_enclosing_package()
        except InvalidPackage:
            # Leave this_package defined so the checks below can run.
            this_package = None
# Handle context-based package building
if opts.build_this:
if this_package:
opts.packages += [this_package]
else:
sys.exit("catkin build: --this was specified, but this directory is not contained by a catkin package.")
    # If --start-with was used without any packages and --this was specified, start with this package
if opts.start_with_this:
if this_package:
opts.start_with = this_package
else:
sys.exit("catkin build: --this was specified, but this directory is not contained by a catkin package.")
if opts.no_deps and not opts.packages:
sys.exit("With --no-deps, you must specify packages to build.")
if not opts.force_color and not is_tty(sys.stdout):
set_color(False)
# Load the context
ctx = Context.Load(opts.workspace, opts.profile, opts)
# Load the environment of the workspace to extend
if ctx.extend_path is not None:
try:
load_resultspace_environment(ctx.extend_path)
except IOError as exc:
log(clr("@!@{rf}Error:@| Unable to extend workspace from \"%s\": %s" %
(ctx.extend_path, exc.message)))
return 1
# Display list and leave the filesystem untouched
if opts.dry_run:
dry_run(ctx, opts.packages, opts.no_deps, opts.start_with)
return
# Check if the context is valid before writing any metadata
if not ctx.source_space_exists():
print("catkin build: error: Unable to find source space `%s`" % ctx.source_space_abs)
return 1
# Always save the last context under the build verb
update_metadata(ctx.workspace, ctx.profile, 'build', ctx.get_stored_dict())
build_metadata = get_metadata(ctx.workspace, ctx.profile, 'build')
if build_metadata.get('needs_force', False):
opts.force_cmake = True
update_metadata(ctx.workspace, ctx.profile, 'build', {'needs_force': False})
# Save the context as the configuration
if opts.save_config:
Context.Save(ctx)
start = time.time()
try:
return build_isolated_workspace(
ctx,
packages=opts.packages,
start_with=opts.start_with,
no_deps=opts.no_deps,
jobs=opts.parallel_jobs,
force_cmake=opts.force_cmake,
force_color=opts.force_color,
quiet=not opts.verbose,
interleave_output=opts.interleave_output,
no_status=opts.no_status,
lock_install=not opts.no_install_lock,
no_notify=opts.no_notify
)
finally:
log("[build] Runtime: {0}".format(format_time_delta(time.time() - start)))
|
xqms/catkin_tools
|
catkin_tools/verbs/catkin_build/cli.py
|
Python
|
apache-2.0
| 9,623
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from nova import context
from nova import exception
from nova.objects import cell_mapping
from nova.objects import instance_mapping
from nova import test
from nova.tests import fixtures
sample_mapping = {'instance_uuid': '',
'cell_id': 3,
'project_id': 'fake-project'}
sample_cell_mapping = {'id': 3,
'uuid': '',
'name': 'fake-cell',
'transport_url': 'rabbit:///',
'database_connection': 'mysql:///'}
def create_cell_mapping(**kwargs):
args = sample_cell_mapping.copy()
if 'uuid' not in kwargs:
args['uuid'] = uuidutils.generate_uuid()
args.update(kwargs)
ctxt = context.RequestContext('fake-user', 'fake-project')
return cell_mapping.CellMapping._create_in_db(ctxt, args)
def create_mapping(**kwargs):
args = sample_mapping.copy()
if 'instance_uuid' not in kwargs:
args['instance_uuid'] = uuidutils.generate_uuid()
args.update(kwargs)
ctxt = context.RequestContext('fake-user', 'fake-project')
return instance_mapping.InstanceMapping._create_in_db(ctxt, args)
class InstanceMappingTestCase(test.NoDBTestCase):
USES_DB_SELF = True
def setUp(self):
super(InstanceMappingTestCase, self).setUp()
self.useFixture(fixtures.Database(database='api'))
self.context = context.RequestContext('fake-user', 'fake-project')
self.mapping_obj = instance_mapping.InstanceMapping()
def test_get_by_instance_uuid(self):
cell_mapping = create_cell_mapping()
mapping = create_mapping()
db_mapping = self.mapping_obj._get_by_instance_uuid_from_db(
self.context, mapping['instance_uuid'])
for key in [key for key in self.mapping_obj.fields.keys()
if key != 'cell_mapping']:
self.assertEqual(db_mapping[key], mapping[key])
self.assertEqual(db_mapping['cell_mapping']['id'], cell_mapping['id'])
def test_get_by_instance_uuid_not_found(self):
self.assertRaises(exception.InstanceMappingNotFound,
self.mapping_obj._get_by_instance_uuid_from_db, self.context,
uuidutils.generate_uuid())
def test_save_in_db(self):
mapping = create_mapping()
cell_mapping = create_cell_mapping()
self.mapping_obj._save_in_db(self.context, mapping['instance_uuid'],
{'cell_id': cell_mapping['id']})
db_mapping = self.mapping_obj._get_by_instance_uuid_from_db(
self.context, mapping['instance_uuid'])
for key in [key for key in self.mapping_obj.fields.keys()
if key not in ['cell_id', 'cell_mapping', 'updated_at']]:
self.assertEqual(db_mapping[key], mapping[key])
self.assertEqual(db_mapping['cell_id'], cell_mapping['id'])
def test_destroy_in_db(self):
mapping = create_mapping()
self.mapping_obj._get_by_instance_uuid_from_db(self.context,
mapping['instance_uuid'])
self.mapping_obj._destroy_in_db(self.context, mapping['instance_uuid'])
self.assertRaises(exception.InstanceMappingNotFound,
self.mapping_obj._get_by_instance_uuid_from_db, self.context,
mapping['instance_uuid'])
def test_cell_id_nullable(self):
# Just ensure this doesn't raise
create_mapping(cell_id=None)
def test_modify_cell_mapping(self):
inst_mapping = instance_mapping.InstanceMapping(context=self.context)
inst_mapping.instance_uuid = uuidutils.generate_uuid()
inst_mapping.project_id = self.context.project_id
inst_mapping.cell_mapping = None
inst_mapping.create()
c_mapping = cell_mapping.CellMapping(
self.context,
uuid=uuidutils.generate_uuid(),
name="cell0",
transport_url="none:///",
database_connection="fake:///")
c_mapping.create()
inst_mapping.cell_mapping = c_mapping
inst_mapping.save()
result_mapping = instance_mapping.InstanceMapping.get_by_instance_uuid(
self.context, inst_mapping.instance_uuid)
self.assertEqual(result_mapping.cell_mapping.id,
c_mapping.id)
class InstanceMappingListTestCase(test.NoDBTestCase):
USES_DB_SELF = True
def setUp(self):
super(InstanceMappingListTestCase, self).setUp()
self.useFixture(fixtures.Database(database='api'))
self.context = context.RequestContext('fake-user', 'fake-project')
self.list_obj = instance_mapping.InstanceMappingList()
def test_get_by_project_id_from_db(self):
project_id = 'fake-project'
mappings = {}
mapping = create_mapping(project_id=project_id)
mappings[mapping['instance_uuid']] = mapping
mapping = create_mapping(project_id=project_id)
mappings[mapping['instance_uuid']] = mapping
db_mappings = self.list_obj._get_by_project_id_from_db(
self.context, project_id)
for db_mapping in db_mappings:
mapping = mappings[db_mapping.instance_uuid]
for key in instance_mapping.InstanceMapping.fields.keys():
self.assertEqual(db_mapping[key], mapping[key])
def test_instance_mapping_list_get_by_cell_id(self):
"""Tests getting all of the InstanceMappings for a given CellMapping id
"""
# we shouldn't have any instance mappings yet
inst_mapping_list = (
instance_mapping.InstanceMappingList.get_by_cell_id(
self.context, sample_cell_mapping['id'])
)
self.assertEqual(0, len(inst_mapping_list))
# now create an instance mapping in a cell
db_inst_mapping1 = create_mapping()
# let's also create an instance mapping that's not in a cell to make
# sure our filtering is working
db_inst_mapping2 = create_mapping(cell_id=None)
self.assertIsNone(db_inst_mapping2['cell_id'])
# now we should list out one instance mapping for the cell
inst_mapping_list = (
instance_mapping.InstanceMappingList.get_by_cell_id(
self.context, db_inst_mapping1['cell_id'])
)
self.assertEqual(1, len(inst_mapping_list))
self.assertEqual(db_inst_mapping1['id'], inst_mapping_list[0].id)
|
vmturbo/nova
|
nova/tests/functional/db/test_instance_mapping.py
|
Python
|
apache-2.0
| 7,062
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from crate.client.sqlalchemy.types import Object, ObjectArray
from crate.client.cursor import Cursor
from unittest import TestCase
from unittest.mock import patch, MagicMock
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
@patch('crate.client.connection.Cursor', FakeCursor)
class CreateTableTest(TestCase):
def setUp(self):
self.engine = sa.create_engine('crate://')
self.Base = declarative_base(bind=self.engine)
def test_create_table_with_basic_types(self):
class User(self.Base):
__tablename__ = 'users'
string_col = sa.Column(sa.String, primary_key=True)
unicode_col = sa.Column(sa.Unicode)
text_col = sa.Column(sa.Text)
int_col = sa.Column(sa.Integer)
long_col1 = sa.Column(sa.BigInteger)
long_col2 = sa.Column(sa.NUMERIC)
bool_col = sa.Column(sa.Boolean)
short_col = sa.Column(sa.SmallInteger)
datetime_col = sa.Column(sa.DateTime)
date_col = sa.Column(sa.Date)
float_col = sa.Column(sa.Float)
double_col = sa.Column(sa.DECIMAL)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE users (\n\tstring_col STRING, '
'\n\tunicode_col STRING, \n\ttext_col STRING, \n\tint_col INT, '
'\n\tlong_col1 LONG, \n\tlong_col2 LONG, '
'\n\tbool_col BOOLEAN, '
'\n\tshort_col SHORT, '
'\n\tdatetime_col TIMESTAMP, \n\tdate_col TIMESTAMP, '
'\n\tfloat_col FLOAT, \n\tdouble_col DOUBLE, '
'\n\tPRIMARY KEY (string_col)\n)\n\n'),
())
def test_with_obj_column(self):
class DummyTable(self.Base):
__tablename__ = 'dummy'
pk = sa.Column(sa.String, primary_key=True)
obj_col = sa.Column(Object)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE dummy (\n\tpk STRING, \n\tobj_col OBJECT, '
'\n\tPRIMARY KEY (pk)\n)\n\n'),
())
def test_with_clustered_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p'
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED BY (p)\n\n'),
())
def test_with_partitioned_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_partitioned_by': 'p',
'invalid_option': 1
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') PARTITIONED BY (p)\n\n'),
())
def test_with_number_of_shards_and_replicas(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_number_of_replicas': '2',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED INTO 3 SHARDS WITH (NUMBER_OF_REPLICAS = 2)\n\n'),
())
def test_with_clustered_by_and_number_of_shards(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk, p)\n'
') CLUSTERED BY (p) INTO 3 SHARDS\n\n'),
())
def test_table_with_object_array(self):
class DummyTable(self.Base):
__tablename__ = 't'
pk = sa.Column(sa.String, primary_key=True)
tags = sa.Column(ObjectArray)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'tags ARRAY(OBJECT), \n\t'
'PRIMARY KEY (pk)\n)\n\n'), ())
|
crate/crate-python
|
src/crate/client/sqlalchemy/tests/create_table_test.py
|
Python
|
apache-2.0
| 6,206
|
import numpy as np
import matplotlib.pyplot as plt #Used for graphing audio tests
import pyaudio as pa
import wave
from time import sleep
#Constants used for sampling audio
CHUNK = 1024
FORMAT = pa.paInt16
CHANNELS = 1
RATE = 44100 # Must match rate at which mic actually samples sound
RECORD_TIMEFRAME = 1.0 #Time in seconds
OUTPUT_FILE = "sample.wav"
#Flag for plotting sound input waves for debugging and implementation purposes
TESTING_GRAPHS = True
def sampleAudio(wav_name=OUTPUT_FILE):
"""Samples audio from the microphone for a given period of time.
The output file is saved as [wav_name]
Code here taken from the front page of:
< https://people.csail.mit.edu/hubert/pyaudio/ > """
# Open the recording session
rec_session = pa.PyAudio()
stream = rec_session.open(format=FORMAT,
channels=CHANNELS,rate=RATE,input=True,frames_per_buffer=CHUNK)
print("Start recording")
frames = []
# Sample audio frames for given time period
for i in range(0, int(RATE/CHUNK*RECORD_TIMEFRAME)):
data = stream.read(CHUNK)
frames.append(data)
# Close the recording session
stream.stop_stream()
stream.close()
rec_session.terminate()
#Create the wav file for analysis
output_wav = wave.open(wav_name,"wb")
output_wav.setnchannels(CHANNELS)
output_wav.setsampwidth(rec_session.get_sample_size(FORMAT))
output_wav.setframerate(RATE)
output_wav.writeframes(b''.join(frames))
output_wav.close()
def getAvgFreq(wav_file=OUTPUT_FILE):
"""Analyzes the audio sample [wav_file] (must be a 16-bit WAV file with
one channel) and returns maximum magnitude of the most prominent sound
and the frequency thresholds it falls between.
Basic procedure of processing audio taken from:
< http://samcarcagno.altervista.org/blog/basic-sound-processing-python/ >"""
#Open wav file for analysis
sound_sample = wave.open(wav_file, "rb")
#Get sampling frequency
sample_freq = sound_sample.getframerate()
#Extract audio frames to be analyzed
# audio_frames = sound_sample.readframes(sound_sample.getnframes())
audio_frames = sound_sample.readframes(1024)
converted_val = []
    # Convert little-endian byte pairs into signed 16-bit sample values per frame
for i in range(0,len(audio_frames),2):
if ord(audio_frames[i+1])>127:
converted_val.append(-(ord(audio_frames[i])+(256*(255-ord(audio_frames[i+1])))))
else:
converted_val.append(ord(audio_frames[i])+(256*ord(audio_frames[i+1])))
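    # (Descriptive note, not in the original: a high byte above 127 marks a
    # negative two's-complement sample, hence the branch above.)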
#Fit into numpy array for FFT analysis
freq_per_frame = np.array(converted_val)
    # Get the normalized power spectrum (|FFT|^2) of the soundwave section
freq = np.fft.fft(freq_per_frame)
amplitude = np.abs(freq)
amplitude = amplitude/float(len(freq_per_frame))
amplitude = amplitude**2
#Get bins/thresholds for frequencies
freqbins = np.fft.fftfreq(CHUNK,1.0/sample_freq)
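    # With CHUNK=1024 and a 44100 Hz sample rate, adjacent FFT bins are
    # spaced 44100 / 1024 ~= 43.07 Hz apart, which bounds the resolution.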
# Plot data if need visualization
if(TESTING_GRAPHS):
#Plot raw data
plt.plot(converted_val)
plt.title("Raw Data")
plt.xlabel("Time (ms)")
plt.ylabel("Frequency (Hz)")
plt.show()
#Plot frequency histogram
plt.plot(freqbins[:16],amplitude[:16])
plt.title("Processed Data")
plt.xlabel("Frequency Bins")
plt.ylabel("Magnitude")
plt.show()
#Get the range that the max amplitude falls in. This represents the loudest noise
magnitude = np.amax(amplitude)
loudest = np.argmax(amplitude)
lower_thres = freqbins[loudest]
upper_thres = (freqbins[1]-freqbins[0])+lower_thres
#Close wav file
sound_sample.close()
#Return the magnitude of the sound wave and its frequency threshold for analysis
return magnitude, lower_thres, upper_thres
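# Hedged usage sketch (not part of the original module): one way the values
# returned by getAvgFreq() might be interpreted. The function name and the
# magnitude threshold below are illustrative assumptions, not project API.
def is_loud_low_tone(wav_file=OUTPUT_FILE, mag_threshold=1.0e6, max_freq=300.0):
    """Return True if the dominant sound is both loud and low-pitched."""
    magnitude, lower_thres, upper_thres = getAvgFreq(wav_file)
    # Loud: the strongest bin exceeds the (arbitrary-unit) magnitude threshold.
    # Low-pitched: the whole bin lies below max_freq Hz.
    return magnitude > mag_threshold and upper_thres <= max_freq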
#Use for testing microphone input
if __name__ == "__main__":
# print("Wait 3 seconds to start...")
# sleep(3)
print("Recording!")
sampleAudio(OUTPUT_FILE)
print("Stop recording!")
print("Analyzing...")
mag, lower, upper = getAvgFreq(OUTPUT_FILE)
print("Magnitude is "+str(mag))
print("Lower bin threshold is "+str(lower))
print("Upper bin threshold is "+str(upper))
|
cornell-cup/cs-minibot-platform
|
python-interface/src/MiniBotFramework/Sound/live_audio_sample.py
|
Python
|
apache-2.0
| 3,940
|
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
from ctypes import POINTER, c_double, c_int64
from pyscf.nao.m_libnao import libnao
libnao.ao_eval.argtypes = (
POINTER(c_int64), # nmult
POINTER(c_double), # psi_log_rl
POINTER(c_int64), # nr
POINTER(c_double), # rhomin_jt
POINTER(c_double), # dr_jt
POINTER(c_int64), # mu2j
POINTER(c_int64), # mu2s
POINTER(c_double), # mu2rcut
POINTER(c_double), # rvec_atom_center
POINTER(c_int64), # ncoords
POINTER(c_double), # coords
POINTER(c_int64), # norbs
POINTER(c_double), # res[orb, icoord]
POINTER(c_int64)) # ldres leading dimension of res (ncoords)
#
#
#
def ao_eval_libnao_(ao, rat, isp, crds, res):
"""
Compute the values of atomic orbitals on given grid points
Args:
ao : instance of ao_log_c class
rat : vector where the atomic orbitals from "ao" are centered
isp : specie index for which we compute
crds: coordinates on which we compute
Returns:
res[norbs,ncoord] : array of atomic orbital values
"""
#print(res_copy.flags)
rat_copy = np.require(rat, dtype=c_double, requirements='C')
crd_copy = np.require(crds, dtype=c_double, requirements='C')
res_copy = np.require(res, dtype=c_double, requirements='CW')
mu2j = np.require(ao.sp_mu2j[isp], dtype=c_int64, requirements='C')
mu2s = np.require(ao.sp_mu2s[isp], dtype=c_int64, requirements='C')
mu2rcut = np.require(ao.sp_mu2rcut[isp], dtype=c_double, requirements='C')
ff = np.require(ao.psi_log_rl[isp], dtype=c_double, requirements='C')
libnao.ao_eval(
c_int64(ao.sp2nmult[isp]),
ff.ctypes.data_as(POINTER(c_double)),
c_int64(ao.nr),
c_double(ao.interp_rr.gammin_jt),
c_double(ao.interp_rr.dg_jt),
mu2j.ctypes.data_as(POINTER(c_int64)),
mu2s.ctypes.data_as(POINTER(c_int64)),
mu2rcut.ctypes.data_as(POINTER(c_double)),
rat_copy.ctypes.data_as(POINTER(c_double)),
c_int64(crd_copy.shape[0]),
crd_copy.ctypes.data_as(POINTER(c_double)),
c_int64(ao.sp2norbs[isp]),
res_copy.ctypes.data_as(POINTER(c_double)),
c_int64(res.shape[1]) )
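  # Note: np.require above returns `res` itself when it already satisfies the
  # 'CW' requirements, so libnao writes straight into the caller's array; the
  # rebinding below only matters in that same-buffer case.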
res = res_copy
return 0
#
# See above
#
def ao_eval_libnao(ao, ra, isp, coords):
res = np.zeros((ao.sp2norbs[isp],coords.shape[0]), dtype='float64')
ao_eval_libnao_(ao, ra, isp, coords, res)
return res
if __name__ == '__main__':
from pyscf.nao.m_system_vars import system_vars_c
from pyscf.nao.m_ao_eval import ao_eval
from pyscf.nao.m_ao_eval_libnao import ao_eval_libnao
sv = system_vars_c()
ra = np.array([0.3, -0.5, 0.77], dtype='float64')
#coords = np.array([[0.07716887, 2.82933578, 3.73214881]])
coords = np.random.rand(35580,3)*5.0
  print('ao_val1 (reference)')
ao_val1 = ao_eval(sv.ao_log, ra, 0, coords)
print('ao_val2_libnao')
ao_val2 = ao_eval_libnao(sv.ao_log, ra, 0, coords)
print(np.allclose(ao_val1,ao_val2))
for iorb,[oo1,oo2] in enumerate(zip(ao_val1,ao_val2)):
print(iorb, abs(oo1-oo2).argmax(), abs(oo1-oo2).max(), coords[abs(oo1-oo2).argmax(),:])
|
gkc1000/pyscf
|
pyscf/nao/m_ao_eval_libnao.py
|
Python
|
apache-2.0
| 3,652
|
# Azure WebJobs executes the first file in the job, so this module is the entry point.
# sys.path.append is required by Azure WebJobs: all packages must be provided to it in the zip file.
# env\Lib\site-packages is the virtual env path on Windows.
import sys
sys.path.append("env\Lib\site-packages")
import logging
import logging.config
from datetime import datetime
import config
from database import DataStore
from ifttt import IFTTT
logging.config.fileConfig('log.config')
logger = logging.getLogger(config.logger_name)
def myExceptionHook(exctype, value, traceback):
logger.error(value)
sys.__excepthook__(exctype, value, traceback)
if __name__ == '__main__':
sys.excepthook = myExceptionHook
print("Running IFTTT checker at %s" % datetime.utcnow())
store = DataStore(config.db_server, config.db_name, config.db_user, config.db_password)
rows = store.getSensorBatteryStatuses()
current_hour = datetime.utcnow().hour
for row in rows:
sensor_id = row[0]
battery = row[1]
cable = row[2]
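        # Start charging: battery low, cable unplugged, and past 19:00 UTC.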
if battery <= 15 and cable == 0 and current_hour > 19:
logger.debug("Request charging %s (%s : %s)" % (sensor_id, battery, cable))
IFTTT.sendEvent(config.ifttt_api_key, sensor_id + config.ifttt_event_on)
# Stop charging when nearing 100
if cable == 1 and battery > 96:
logger.debug("Request unplug %s (%s : %s)" % (sensor_id, battery, cable))
IFTTT.sendEvent(config.ifttt_api_key, sensor_id + config.ifttt_event_off)
|
ttu/cubesensors-iot-azure
|
src/python_checker/checker.py
|
Python
|
apache-2.0
| 1,550
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from urllib import request
from tests.integrated import base
class StatusTestCase(base.IntegrationTest):
def _get_config(self):
port = base.get_free_port()
self.url = "http://localhost:%s" % port
conf = {
"service": {
"name": "status",
"module": "rallyci.services.status",
"listen": ["localhost", port],
}
}
return [[conf], [port]]
def test_index(self):
r = request.urlopen(self.url)
self.assertIsNotNone(r)
|
redixin/rally-ci
|
tests/integrated/test_services_status.py
|
Python
|
apache-2.0
| 1,144
|
# Copyright (c) 2016 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from abc import ABCMeta
from abc import abstractmethod
import six
@six.add_metaclass(ABCMeta)
class CryptographicEngine(object):
"""
The abstract base class of the cryptographic engine hierarchy.
A cryptographic engine is responsible for generating all cryptographic
objects and conducting all cryptographic operations for a KMIP server
instance.
"""
@abstractmethod
def create_symmetric_key(self, algorithm, length):
"""
Create a symmetric key.
Args:
algorithm(CryptographicAlgorithm): An enumeration specifying the
algorithm for which the created key will be compliant.
length(int): The length of the key to be created. This value must
be compliant with the constraints of the provided algorithm.
Returns:
dict: A dictionary containing the key data, with the following
key/value fields:
* value - the bytes of the key
* format - a KeyFormatType enumeration for the bytes format
"""
@abstractmethod
def create_asymmetric_key_pair(self, algorithm, length):
"""
Create an asymmetric key pair.
Args:
algorithm(CryptographicAlgorithm): An enumeration specifying the
algorithm for which the created keys will be compliant.
length(int): The length of the keys to be created. This value must
be compliant with the constraints of the provided algorithm.
Returns:
dict: A dictionary containing the public key data, with the
following key/value fields:
* value - the bytes of the key
* format - a KeyFormatType enumeration for the bytes format
dict: A dictionary containing the private key data, identical in
structure to the public key dictionary.
"""
|
viktorTarasov/PyKMIP
|
kmip/services/server/crypto/api.py
|
Python
|
apache-2.0
| 2,580
|
# -*- encoding: utf-8 -*-
from django.db import models
import reversion
from base.model_utils import TimeStampedModel
from base.singleton import SingletonModel
from block.models import (
Page,
PageSection,
Section,
)
|
pkimber/cms
|
cms/models.py
|
Python
|
apache-2.0
| 233
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def read(filename):
    """Read all lines of [filename] in 10000-byte batches and return them as a list."""
    lines_read = []
    with open(filename, 'r') as fp:
        while True:
            lines = fp.readlines(10000)
            if not lines:
                break
            lines_read.extend(lines)
    return lines_read
def Write(file,dic):
with open(file,'w') as fp:
for i in dic:
fp.write(i)
if __name__=='__main__':
test = read('output.txt')
test += read("dire.txt")
    print(test)
Write('output.txt',set(test))
|
momomoxiaoxi/security
|
Scripts/Check.py
|
Python
|
apache-2.0
| 574
|
# Copyright 2013 OpenStack Foundation
# Copyright 2013 Rackspace Hosting
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from collections import deque
from proboscis import test
from proboscis import asserts
from proboscis import after_class
from proboscis import before_class
from trove.tests.config import CONFIG
from trove.tests.api.instances import instance_info
from trove.tests.api.instances import VOLUME_SUPPORT
from trove.tests.util.users import Requirements
from trove.tests.util import assert_contains
from trove.tests.util import create_dbaas_client
from trove.common.utils import poll_until
@test(groups=["dbaas.api.mgmt.malformed_json"])
class MalformedJson(object):
@before_class
def setUp(self):
self.reqs = Requirements(is_admin=False)
self.user = CONFIG.users.find_user(self.reqs)
self.dbaas = create_dbaas_client(self.user)
volume = None
if VOLUME_SUPPORT:
volume = {"size": 1}
self.instance = self.dbaas.instances.create(
name="qe_instance",
flavor_id=instance_info.dbaas_flavor_href,
volume=volume,
databases=[{"name": "firstdb", "character_set": "latin2",
"collate": "latin2_general_ci"}])
@after_class
def tearDown(self):
self.dbaas.instances.delete(self.instance)
@test
def test_bad_instance_data(self):
databases = "foo"
users = "bar"
try:
self.dbaas.instances.create("bad_instance", 3, 3,
databases=databases, users=users)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create instance failed with code %s,"
" exception %s" % (httpCode, e))
databases = "u'foo'"
users = "u'bar'"
assert_contains(
e.message,
["Validation error:",
"instance['databases'] %s is not of type 'array'" % databases,
"instance['users'] %s is not of type 'array'" % users,
"instance['volume'] 3 is not of type 'object'"])
@test
def test_bad_database_data(self):
_bad_db_data = "{foo}"
try:
self.dbaas.databases.create(self.instance.id, _bad_db_data)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create database failed with code %s, "
"exception %s" % (httpCode, e))
_bad_db_data = "u'{foo}'"
asserts.assert_equal(e.message,
"Validation error: "
"databases %s is not of type 'array'" %
_bad_db_data)
@test
def test_bad_user_data(self):
def format_path(values):
values = list(values)
msg = "%s%s" % (values[0],
''.join(['[%r]' % i for i in values[1:]]))
return msg
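        # e.g. format_path(('users', 0)) returns "users[0]" (illustrative note).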
_user = []
_user_name = "F343jasdf"
_user.append({"name12": _user_name,
"password12": "password"})
try:
self.dbaas.users.create(self.instance.id, _user)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create user failed with code %s, "
"exception %s" % (httpCode, e))
err_1 = format_path(deque(('users', 0)))
assert_contains(
e.message,
["Validation error:",
"%(err_1)s 'name' is a required property" % {'err_1': err_1},
"%(err_1)s 'password' is a required property"
% {'err_1': err_1}])
@test
def test_bad_resize_instance_data(self):
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
try:
self.dbaas.instances.resize_instance(self.instance.id, "bad data")
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Resize instance failed with code %s, "
"exception %s" % (httpCode, e))
@test
def test_bad_resize_vol_data(self):
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
data = "bad data"
try:
self.dbaas.instances.resize_volume(self.instance.id, data)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Resize instance failed with code %s, "
"exception %s" % (httpCode, e))
data = "u'bad data'"
assert_contains(
e.message,
["Validation error:",
"resize['volume']['size'] %s is not valid under "
"any of the given schemas" % data,
"%s is not of type 'integer'" % data,
"%s does not match '[0-9]+'" % data])
@test
def test_bad_change_user_password(self):
password = ""
users = [{"name": password}]
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
try:
self.dbaas.users.change_passwords(self.instance, users)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Change usr/passwd failed with code %s, "
"exception %s" % (httpCode, e))
password = "u''"
assert_contains(
e.message,
["Validation error: users[0] 'password' "
"is a required property",
"users[0]['name'] %s is too short" % password,
"users[0]['name'] %s does not match "
"'^.*[0-9a-zA-Z]+.*$'" % password])
@test
def test_bad_grant_user_access(self):
dbs = []
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
try:
self.dbaas.users.grant(self.instance, self.user, dbs)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Grant user access failed with code %s, "
"exception %s" % (httpCode, e))
@test
def test_bad_revoke_user_access(self):
db = ""
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
try:
self.dbaas.users.revoke(self.instance, self.user, db)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 404,
"Revoke user access failed w/code %s, "
"exception %s" % (httpCode, e))
asserts.assert_equal(e.message, "The resource could not be found.")
@test
def test_bad_body_flavorid_create_instance(self):
flavorId = ["?"]
try:
self.dbaas.instances.create("test_instance",
flavorId,
2)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create instance failed with code %s, "
"exception %s" % (httpCode, e))
flavorId = [u'?']
assert_contains(
e.message,
["Validation error:",
"instance['flavorRef'] %s is not valid "
"under any of the given schemas" % flavorId,
"%s is not of type 'string'" % flavorId,
"%s is not of type 'string'" % flavorId,
"%s is not of type 'integer'" % flavorId,
"instance['volume'] 2 is not of type 'object'"])
@test
def test_bad_body_datastore_create_instance(self):
datastore = "*"
datastore_version = "*"
try:
self.dbaas.instances.create("test_instance",
3, {"size": 2},
datastore=datastore,
datastore_version=datastore_version)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create instance failed with code %s, "
"exception %s" % (httpCode, e))
assert_contains(
e.message,
["Validation error:",
"instance['datastore']['type']"
" u'%s' does not match"
" '^.*[0-9a-zA-Z]+.*$'" % datastore,
"instance['datastore']['version'] u'%s' "
"does not match '^.*[0-9a-zA-Z]+.*$'" % datastore_version])
@test
def test_bad_body_volsize_create_instance(self):
volsize = "h3ll0"
try:
self.dbaas.instances.create("test_instance",
"1",
volsize)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create instance failed with code %s, "
"exception %s" % (httpCode, e))
volsize = "u'h3ll0'"
asserts.assert_equal(e.message,
"Validation error: "
"instance['volume'] %s is not of "
"type 'object'" % volsize)
|
changsimon/trove
|
trove/tests/api/mgmt/malformed_json.py
|
Python
|
apache-2.0
| 12,085
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
testRecipe1 = """\
class TestRecipe1(PackageRecipe):
name = 'testcase'
version = '1.0'
clearBuildReqs()
owner = 'root'
group = 'root'
withBinary = True
withUse = False
changedconfig = '%(sysconfdir)s/changedconfig'
unchangedconfig = '%(sysconfdir)s/unchangedconfig'
changed = '%(datadir)s/changed'
unchanged = '%(datadir)s/unchanged'
initialFileText = '\\n'.join([str(x) for x in range(0,10)]) + '\\n'
fileText = initialFileText
def modifyFiles(self):
pass
def setup(self):
if self.withUse:
if Use.readline:
pass
if self.withBinary:
self.Run('''
cat > hello.c <<'EOF'
#include <stdio.h>
int main(void) {
return printf("Hello, world.\\\\n");
}
EOF
''')
self.Make('hello', preMake='LDFLAGS="-static"')
self.Install('hello', '%(bindir)s/')
self.Create(self.changedconfig, self.unchangedconfig,
self.changed, self.unchanged, contents=self.initialFileText)
self.modifyFiles()
self.Ownership(self.owner, self.group, '.*')
self.ComponentSpec('runtime', '%(datadir)s/', '%(sysconfdir)s/')
self.Strip(debuginfo=False)
"""
testRecipe2="""\
class TestRecipe2(TestRecipe1):
version = '1.1'
fileText = TestRecipe1.fileText.replace("5", "1")
def modifyFile(self, path):
return 'sed -i s/^5/1/g %(destdir)s'+path
def modifyFiles(self):
for path in (self.changedconfig, self.changed):
self.Run(self.modifyFile(path))
def setup(self):
TestRecipe1.setup(self)
"""
testRecipe3="""\
class TestRecipe3(TestRecipe1):
version = '1.2'
fileText = TestRecipe1.fileText.replace("6", "2")
def modifyFile(self, path):
return 'sed -i s/^6/2/g %(destdir)s'+path
def modifyFiles(self):
for path in (self.changedconfig,):
self.Run(self.modifyFile(path))
def setup(self):
TestRecipe1.setup(self)
"""
testRecipe4="""\
class TestRecipe4(TestRecipe1):
version = '1.3'
def setup(self):
TestRecipe1.setup(self)
self.Config(exceptions = "/etc/.*")
"""
# like TestRecipe1, but only includes /usr/bin/hello
testRecipe5="""\
class TestRecipe5(TestRecipe1):
version = '1.4'
def setup(r):
TestRecipe1.setup(r)
r.Remove(r.changed)
r.Remove(r.unchanged)
r.Remove(r.changedconfig)
r.Remove(r.unchangedconfig)
"""
testTransientRecipe1=r"""\
class TransientRecipe1(PackageRecipe):
name = 'testcase'
version = '1.0'
clearBuildReqs()
fileText = 'bar\n'
def setup(r):
r.Create('/foo', contents=r.fileText)
r.Transient('/foo')
"""
testTransientRecipe2=r"""\
class TransientRecipe2(PackageRecipe):
name = 'testcase'
version = '1.1'
clearBuildReqs()
fileText = 'blah\n'
def setup(r):
r.Create('/foo', contents=r.fileText)
r.Transient('/foo')
"""
testTransientRecipe3=r"""\
class TransientRecipe3(PackageRecipe):
name = 'testcase'
version = '1.2'
clearBuildReqs()
fileText = 'blah\n'
def setup(r):
#don't create foo
r.Create('/foo2', contents=r.fileText)
r.Transient('/foo2')
"""
testTransientRecipe4=r"""\
class TransientRecipe4(PackageRecipe):
name = 'testcase'
version = '1.3'
clearBuildReqs()
fileText = 'blahblech\n'
def setup(r):
#don't create foo
r.Create('/foo3', contents=r.fileText)
r.Transient('/foo3')
"""
libhelloRecipePreface="""\
class Libhello(PackageRecipe):
name = 'libhello'
version = '0'
clearBuildReqs()
def setup(self):
        # NormalizeInterpreterPaths is not the purpose of these tests, and
        # letting it run would make them needlessly verbose.
del self.NormalizeInterpreterPaths
self.Create('libhello.c', contents='''
/* libhello.c - Simple example of a shared library */
void return_one(void) {
return 1;
}
''')
self.Create('true.c', contents='''
int main() {
return 0;
}
''')
self.Create('user.c', contents='''
int main() {
return return_one();
}
''')
"""
libhelloRecipe = libhelloRecipePreface + r"""
self.Run('%(cc)s %(ldflags)s -fPIC -shared -Wl,-soname,libhello.so.0 -o libhello.so.0.0 libhello.c -nostdlib')
self.Run('%(cc)s %(ldflags)s -static -o true true.c')
self.Run('%(cc)s %(ldflags)s -nostdlib -o user user.c libhello.so.0.0')
self.Install('libhello.so.0.0', '%(libdir)s/libhello.so.0.0')
self.Install('true', '%(essentialsbindir)s/ldconfig', mode=0755)
self.Install('user', '%(essentialsbindir)s/user', mode=0755)
self.Create('/etc/ld.so.conf', contents='/%(lib)s')
self.Create('%(essentialbindir)s/script',
contents='#!%(essentialsbindir)s/user', mode = 0755)
self.Provides('file', '%(essentialsbindir)s/user')
self.ComponentSpec('runtime', '%(essentialsbindir)s/ldconfig',
'%(libdir)s/libhello.so.0.*',
'%(sysconfdir)s/')
self.ComponentSpec('user', '%(essentialsbindir)s/user')
self.ComponentSpec('script', '%(essentialbindir)s/script')
self.Strip(debuginfo=False)
"""
libhelloRecipeLdConfD = libhelloRecipePreface + r"""
self.Run('%(cc)s %(ldflags)s -fPIC -shared -Wl,-soname,libhello.so.0 -o libhello.so.0.0 libhello.c -nostdlib')
self.Run('%(cc)s %(ldflags)s -static -o true true.c')
self.Run('%(cc)s %(ldflags)s -nostdlib -o user user.c libhello.so.0.0')
self.Install('libhello.so.0.0', '%(libdir)s/libhello.so.0.0')
self.Install('libhello.so.0.0', '%(essentiallibdir)s/libhello.so.0.0')
self.Install('true', '%(essentialsbindir)s/ldconfig', mode=0755)
self.Install('user', '%(essentialsbindir)s/user', mode=0755)
self.Create('/etc/ld.so.conf', contents='/opt/foo')
self.Create('/etc/ld.so.conf.d/first.conf', contents='%(essentiallibdir)s')
self.Create('%(essentialbindir)s/script',
contents='#!%(essentialsbindir)s/user', mode = 0755)
self.Provides('file', '%(essentialsbindir)s/user')
self.ComponentSpec('runtime', '%(essentialsbindir)s/ldconfig',
'%(libdir)s/libhello.so.0.*',
'%(essentiallibdir)s/libhello.so.0.*',
'/etc/ld.so.conf.d/first.conf',
'%(sysconfdir)s/')
self.ComponentSpec('user', '%(essentialsbindir)s/user')
self.ComponentSpec('script', '%(essentialbindir)s/script')
self.Strip(debuginfo=False)
"""
libhelloRecipeNoVersion = libhelloRecipePreface + """\
self.Run('%(cc)s %(ldflags)s -fPIC -shared -Wl,-soname,libhello.so -o libhello.so libhello.c -nostdlib')
self.Run('%(cc)s %(ldflags)s -static -o true true.c')
self.Run('%(cc)s %(ldflags)s -nostdlib -o user user.c libhello.so')
self.Install('libhello.so', '%(libdir)s/libhello.so', mode=0644)
self.Install('true', '%(essentialsbindir)s/ldconfig', mode=0755)
self.Install('user', '%(essentialsbindir)s/user', mode=0755)
self.Create('/etc/ld.so.conf', contents='/lib')
self.Create('%(essentialbindir)s/script',
contents='#!%(essentialsbindir)s/user', mode = 0755)
self.Provides('file', '%(essentialsbindir)s/user')
self.ComponentSpec('runtime', '%(essentialsbindir)s/ldconfig',
'%(libdir)s/libhello.so',
'%(sysconfdir)s/')
self.ComponentSpec('user', '%(essentialsbindir)s/user')
self.ComponentSpec('script', '%(essentialbindir)s/script')
self.Strip(debuginfo=False)
"""
bashRecipe="""\
class Bash(PackageRecipe):
name = 'bash'
version = '0'
clearBuildReqs()
def setup(r):
del r.NormalizeInterpreterPaths
r.Create('%(essentialbindir)s/bash', mode=0755)
r.Create('%(essentialbindir)s/conflict', mode=0755)
r.Provides('file', '%(essentialbindir)s/(ba)?sh')
if Use.ssl:
# turn on this use flag; we use this in the tests for flavor
# dependent resolution
pass
"""
bashMissingRecipe="""\
class Bash(PackageRecipe):
name = 'bash'
version = '1'
clearBuildReqs()
def setup(r):
del r.NormalizeInterpreterPaths
r.Create('%(essentialbindir)s/conflict', mode=0755)
if Use.ssl:
# turn on this use flag; we use this in the tests for flavor
# dependent resolution
pass
"""
bashUserRecipe="""\
class BashUser(PackageRecipe):
name = 'bashuser'
version = '0'
clearBuildReqs()
def setup(r):
del r.NormalizeInterpreterPaths
r.Create('%(essentialbindir)s/script', mode=0755,
contents = '#!/bin/bash')
"""
bashTroveUserRecipe="""\
class BashTroveUser(PackageRecipe):
name = 'bashtroveuser'
version = '0'
clearBuildReqs()
def setup(r):
del r.NormalizeInterpreterPaths
r.Create('%(essentiallibdir)s/empty', mode=0644)
r.Requires('bash:runtime', '%(essentiallibdir)s/empty')
"""
gconfRecipe="""\
class Gconf(PackageRecipe):
name = 'gconf'
version = '0'
clearBuildReqs()
def setup(r):
r.Create('%(sysconfdir)s/gconf/schemas/foo')
r.Install('/bin/true', '%(bindir)s/gconftool-2', mode=0755)
self.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
chkconfigRecipe="""\
class ChkconfigTest(PackageRecipe):
name = 'testchk'
version = '0'
clearBuildReqs()
def setup(self):
self.Run('''
cat > chkconfig.c <<'EOF'
int main(int argc, char ** argv) {
int fd;
char ** chptr;
fd = open(\"OUT\", 0102, 0666);
for (chptr = argv; *chptr; chptr++) {
write(fd, *chptr, strlen(*chptr));
if (*(chptr + 1)) write(fd, \" \", 1);
}
write(fd, \"\\\\n\", 1);
close(fd);
}
EOF
''')
self.Run('''
cat > testchk <<'EOF'
# chkconfig: 345 95 5
# description: Runs commands scheduled by the at command at the time \
# specified when at was run, and runs batch commands when the load \
# average is low enough.
# processname: atd
EOF
''')
self.Run('%(cc)s %(ldflags)s -static -o chkconfig chkconfig.c')
self.Install("chkconfig", "%(essentialsbindir)s/", mode = 0755)
self.Install("testchk", "%(initdir)s/", mode = 0755)
self.Strip(debuginfo=False)
"""
doubleRecipe1 = """
class Double(PackageRecipe):
name = 'double'
version = '1.0'
clearBuildReqs()
owner = 'root'
group = 'root'
def setup(self):
self.Create("/etc/foo1", contents = "text1")
self.Ownership(self.owner, self.group, '.*')
self.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
doubleRecipe1_1 = """
class Double(PackageRecipe):
name = 'double'
version = '1.1'
clearBuildReqs()
owner = 'root'
group = 'root'
def setup(self):
self.Create("/etc/foo1.1", contents = "text1.1")
self.Ownership(self.owner, self.group, '.*')
self.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
doubleRecipe1_2 = """
class Double(PackageRecipe):
name = 'double'
version = '1.2'
clearBuildReqs()
owner = 'root'
group = 'root'
def setup(self):
self.Create("/etc/foo1.2", contents = "text1.2")
self.Ownership(self.owner, self.group, '.*')
self.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
doubleRecipe1_3 = """
class Double(PackageRecipe):
name = 'double'
version = '1.3'
clearBuildReqs()
owner = 'root'
group = 'root'
def setup(self):
self.Create("/etc/foo1.3", contents = "text1.3")
self.Ownership(self.owner, self.group, '.*')
self.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
doubleRecipe2 = """
class Double(PackageRecipe):
name = 'double'
version = '2.0'
clearBuildReqs()
owner = 'root'
group = 'root'
def setup(self):
self.Create("/etc/foo2", contents = "text2")
self.Ownership(self.owner, self.group, '.*')
self.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
doubleRecipe2_1 = """
class Double(PackageRecipe):
name = 'double'
version = '2.1'
clearBuildReqs()
owner = 'root'
group = 'root'
def setup(self):
self.Create("/etc/foo2.1", contents = "text2.1")
self.Ownership(self.owner, self.group, '.*')
self.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
simpleTagHandler = """r.Run('''
cat > testtag.taghandler.c <<'EOF'
int main(int argc, char ** argv) {
int fd;
char ** chptr;
fd = open(\"OUT%s\", 0102, 0666);
for (chptr = argv; *chptr; chptr++) {
write(fd, *chptr, strlen(*chptr));
if (*(chptr + 1)) write(fd, \" \", 1);
}
write(fd, \"\\\\n\", 1);
close(fd);
}
EOF
''')
r.Run('%%(cc)s %%(ldflags)s -static -o testtag.taghandler testtag.taghandler.c')
r.Strip(debuginfo=False)"""
tagProviderRecipe1 = """
class TagProvider(PackageRecipe):
name = 'tagprovider'
version = '0'
clearBuildReqs()
def setup(r):
r.Run('''
cat > testtag.tagdescription <<EOF
file /usr/libexec/conary/tags/testtag
implements files update
implements files remove
include /etc/test.*
EOF
''')
%(simpleTagHandler)s
r.Install('testtag.tagdescription',
'%%(tagdescriptiondir)s/testtag')
r.Install('testtag.taghandler',
'%%(taghandlerdir)s/testtag')
# Also test tagging our own files
r.Create('/etc/testself.1')
r.ComponentSpec('runtime', '%%(sysconfdir)s/')
""" % { 'simpleTagHandler' : (simpleTagHandler % "") }
tagProviderRecipe2 = """
class TagProvider(PackageRecipe):
name = 'tagprovider'
version = '1'
clearBuildReqs()
def setup(r):
r.Run('''
cat > testtag.tagdescription <<EOF
file /usr/libexec/conary/tags/testtag
implements files update
implements files preremove
implements files remove
implements files preupdate
implements handler update
implements handler preremove
datasource args
include /etc/test.*
EOF
''')
%(simpleTagHandler)s
r.Install('testtag.tagdescription',
'%%(tagdescriptiondir)s/testtag')
r.Install('testtag.taghandler',
'%%(taghandlerdir)s/testtag')
# Also test tagging our own files
r.Create('/etc/testself.1')
r.ComponentSpec('runtime', '%%(sysconfdir)s/')
""" % { 'simpleTagHandler' : (simpleTagHandler % "") }
tagProviderRecipe3 = """
class TagProvider(PackageRecipe):
name = 'tagprovider'
version = '1'
clearBuildReqs()
def setup(r):
r.Run('''
cat > testtag.tagdescription <<EOF
file /usr/libexec/conary/tags/testtag
implements files update
datasource stdin
include /etc/test.*
EOF
''')
%(simpleTagHandler)s
r.Install('testtag.tagdescription',
'%%(tagdescriptiondir)s/testtag')
r.Install('testtag.taghandler',
'%%(taghandlerdir)s/testtag')
""" % { 'simpleTagHandler' : (simpleTagHandler % "") }
# this is just like tagProviderRecipe2, but the tagdescription will create
# /tmp/OUT2 instead of /tmp/OUT
tagProviderRecipe4 = """
class TagProvider(PackageRecipe):
name = 'tagprovider'
version = '1'
clearBuildReqs()
def setup(r):
r.Run('''
cat > testtag.tagdescription <<EOF
file /usr/libexec/conary/tags/testtag
implements files update
implements files preremove
implements files remove
implements handler update
implements handler preremove
datasource args
include /etc/test.*
EOF
''')
%(simpleTagHandler)s
r.Install('testtag.tagdescription',
'%%(tagdescriptiondir)s/testtag')
r.Install('testtag.taghandler',
'%%(taghandlerdir)s/testtag')
# Also test tagging our own files
r.Create('/etc/testself.1')
r.ComponentSpec('runtime', '%%(sysconfdir)s/')
""" % { 'simpleTagHandler' : (simpleTagHandler % "2") }
# this is just like tagProviderRecipe2, but it has a more limited implements
# set
tagProviderRecipe5 = """
class TagProvider(PackageRecipe):
name = 'tagprovider'
version = '1'
clearBuildReqs()
def setup(r):
r.Run('''
cat > testtag.tagdescription <<EOF
file /usr/libexec/conary/tags/testtag
implements files remove
datasource args
include /etc/test.*
EOF
''')
%(simpleTagHandler)s
r.Install('testtag.tagdescription',
'%%(tagdescriptiondir)s/testtag')
r.Install('testtag.taghandler',
'%%(taghandlerdir)s/testtag')
# Also test tagging our own files
r.Create('/etc/testself.1')
r.ComponentSpec('runtime', '%%(sysconfdir)s/')
""" % { 'simpleTagHandler' : (simpleTagHandler % "") }
firstTagUserRecipe1 = """
class FirstTagUser(PackageRecipe):
name = 'firsttaguser'
version = '0'
clearBuildReqs()
def setup(r):
r.Run('''
cat > testfirst.1 <<EOF
first.1
EOF
''')
r.Run('''
cat > testfirst.2 <<EOF
first.2
EOF
''')
r.Install('testfirst.1', '/etc/testfirst.1')
r.Install('testfirst.2', '/etc/testfirst.2')
r.TagSpec('testtag', '/etc/test.*')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
secondTagUserRecipe1 = """
class SecondTagUser(PackageRecipe):
name = 'secondtaguser'
version = '0'
clearBuildReqs()
def setup(r):
r.Run('''
cat > testsecond.1 <<EOF
second.1
EOF
''')
r.Install('testsecond.1', '/etc/testsecond.1')
r.TagSpec('testtag', '/etc/test.*')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
multiTagRecipe0 = """
class MultiTag(PackageRecipe):
name = 'multitag'
version = '0'
clearBuildReqs()
def setup(r):
r.Create('%(tagdescriptiondir)s/foo', contents='''file %(taghandlerdir)s/foo
implements files update
implements files remove
datasource multitag
''')
r.Create('%(tagdescriptiondir)s/bar', contents='''file %(taghandlerdir)s/foo
implements files update
implements files remove
datasource multitag
''')
r.Create('%(taghandlerdir)s/foo', mode=0755, contents='''\
#!/bin/bash
exit 0
''')
r.Create('/foo')
r.TagSpec('foo', '/foo')
"""
multiTagRecipe = multiTagRecipe0 + """
r.TagSpec('bar', '/foo')
"""
multiTagRecipe2 = multiTagRecipe0
# Like multiTagRecipe0, but the tag handler echoes $SOMEVAR before exiting.
multiTagRecipe3 = multiTagRecipe0.replace("exit 0",
"echo ${SOMEVAR:-UNDEFINED}; exit 0")
linkRecipe1 = """\
class LinkRecipe(PackageRecipe):
name = 'linktest'
version = '1.0'
clearBuildReqs()
hard = 1
paths = ("/usr/share/foo", "/usr/share/bar")
initialFileText = '\\n'.join([str(x) for x in range(0,10)]) + '\\n'
fileText = initialFileText
def setup(r):
r.Create(r.paths[0], contents=r.initialFileText)
for path in r.paths[1:]:
if r.hard:
r.Run("ln %%(destdir)s/%s %%(destdir)s/%s" % (r.paths[0], path))
else:
r.Run("ln -s %s %%(destdir)s/%s" % (r.paths[0], path))
"""
linkRecipe2 = """\
class LinkRecipe2(LinkRecipe):
name = 'linktest'
version = '1.1'
"""
linkRecipe3 = """\
class LinkRecipe3(LinkRecipe):
name = 'linktest'
version = '1.2'
paths = ("/usr/share/foo", "/usr/share/bar", "/usr/share/foobar")
"""
# two link groups, both linkgroups have the same contents sha1
linkRecipe4 = """\
class LinkRecipe(PackageRecipe):
name = 'linktest'
version = '1.0'
clearBuildReqs()
hard = 1
paths = ('/usr/share/lg1-1',
'/usr/share/lg1-2',
'/usr/share/lg2-1',
'/usr/share/lg2-2')
initialFileText = '\\n'.join([str(x) for x in range(0,10)]) + '\\n'
fileText = initialFileText
def setup(r):
r.Create(r.paths[0], contents=r.initialFileText)
r.Run("ln %%(destdir)s/%s %%(destdir)s/%s" % (r.paths[0],
r.paths[1]))
r.Create(r.paths[2], contents=r.initialFileText)
r.Run("ln %%(destdir)s/%s %%(destdir)s/%s" % (r.paths[2],
r.paths[3]))
"""
idChange1 = """\
class IdChange1(PackageRecipe):
name = 'idchange'
version = '1.0'
clearBuildReqs()
paths = [ "/etc/foo", "/etc/bar" ]
fileText = '\\n'.join([str(x) for x in range(0,10)]) + '\\n'
def setup(r):
for path in r.paths:
r.Create(path, contents=r.fileText)
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
idChange2 = """\
class IdChange2(IdChange1):
paths = [ "/etc/foo" ]
    fileText = IdChange1.fileText.replace("5", "10")
version = '1.1'
"""
idChange3 = """\
class IdChange3(IdChange1):
paths = [ "/etc/foo", "/etc/bar" ]
    fileText = IdChange1.fileText.replace("6", "11")
version = '1.2'
"""
testUnresolved = """\
class Unresolved(PackageRecipe):
name = 'testcase'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/usr/bin/test', mode=0755)
r.Requires('bar:foo', '/usr/bin/test')
"""
testTroveDepA = """\
class A(PackageRecipe):
name = 'a'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/usr/bin/a', mode=0755)
"""
testTroveDepB = """\
class B(PackageRecipe):
name = 'b'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/usr/bin/b', mode=0755)
r.Requires('a:runtime', '/usr/bin/b')
"""
# these test updating a config file from a version which will no longer
# exist (and be cleared from the content store) to a new one
simpleConfig1 = """\
class SimpleConfig1(PackageRecipe):
name = 'simpleconfig'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create("/etc/foo", contents = "text 1")
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
simpleConfig2 = """\
class SimpleConfig2(PackageRecipe):
name = 'simpleconfig'
version = '2.0'
clearBuildReqs()
def setup(r):
r.Create("/etc/foo", contents = "text 2")
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
testRecipeTemplate = """\
class TestRecipe%(num)d(PackageRecipe):
name = 'test%(num)d'
version = '%(version)s'
clearBuildReqs()
buildRequires = [ %(requires)s ]
%(header)s
%(flags)s
def setup(r):
%(flavor)s
r.Create('/usr/bin/test%(num)s',contents='''\
#!/bin/sh
echo "This is test%(num)s"
%(fileContents)s
''', mode=0755)
del r.NormalizeInterpreterPaths
if %(binary)s:
r.Run('''
cat > hello.c <<'EOF'
#include <stdio.h>
int main(void) {
return printf("Hello, world.\\\\n");
}
EOF
''')
r.Make('hello', preMake='LDFLAGS="-static"')
r.Install('hello', '%%(bindir)s/')
%(content)s
%(subpkgs)s
%(tagspec)s
%(fail)s
# override :config
r.ComponentSpec('runtime', '.*')
"""
def createRecipe(num, requires=[], fail=False, content='',
packageSpecs=[],
subPackages = [], version='1.0', localflags=[], flags=[],
header='', fileContents='', tag=None, binary=False):
reqList = []
for req in requires:
reqList.append("'test%d:runtime'" % req)
subs = {}
subs['requires'] = ', '.join(reqList)
subs['version'] = version
subs['num'] = num
subs['content'] = content
subs['fileContents'] = fileContents
subs['header'] = header
subs['binary'] = binary
subpkgStrs = []
flagStrs = []
flavorStrs = []
if localflags and not isinstance(localflags, (tuple, list)):
localflags = [localflags]
for flag in localflags:
flagStr = 'Flags.%s = True' % flag
flavorStr = 'if Flags.%s: pass' % flag
flagStrs.append(flagStr)
flavorStrs.append(flavorStr)
if tag:
subs['tagspec'] = "r.TagSpec('%s', '/usr/bin/test1')" % tag
else:
subs['tagspec'] = ''
if flags and not isinstance(flags, (tuple, list)):
flags = [flags]
for flag in flags:
flavorStr = 'if %s: pass' % flag
flavorStrs.append(flavorStr)
subs['flags'] = '\n '.join(flagStrs)
subs['flavor'] = '\n '.join(flavorStrs)
# add indentation
subpkgStrs.append('\n '.join(packageSpecs))
for subpkg in subPackages:
subpkgStr = '''
r.Create('%%(thisdocdir)s/README-%(subpkg)s')
r.Create('/asdf/runtime-%(subpkg)s')
r.PackageSpec('%(name)s-%(subpkg)s', 'README-%(subpkg)s')
r.PackageSpec('%(name)s-%(subpkg)s', 'runtime-%(subpkg)s')
''' % { 'name' : ('test%d' % num), 'subpkg' : subpkg }
subpkgStrs.append(subpkgStr)
subs['subpkgs'] = '\n'.join(subpkgStrs)
if fail:
subs['fail'] = 'r.Run("exit 1")'
else:
subs['fail'] = ''
return testRecipeTemplate % subs
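# Hedged usage note (illustrative, not from the original file): createRecipe()
# renders testRecipeTemplate into recipe source. For example, a call like
#     createRecipe(3, requires=[1, 2], version='2.0', binary=True)
# yields a TestRecipe3 class whose buildRequires lists 'test1:runtime' and
# 'test2:runtime' and which also builds the static "hello" binary.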
fileTypeChangeRecipe1="""\
class FileTypeChange(PackageRecipe):
name = 'filetypechange'
version = '1'
clearBuildReqs()
def setup(r):
r.Create('%(essentialbindir)s/foo', mode=0755, contents = 'some text')
"""
fileTypeChangeRecipe2="""\
class FileTypeChange(PackageRecipe):
name = 'filetypechange'
version = '2'
clearBuildReqs()
def setup(r):
r.Run("mkdir %(destdir)s%(essentialbindir)s")
r.Run("ln -s foo %(destdir)s%(essentialbindir)s/foo")
"""
manyFlavors = """\
class ManyFlavors(PackageRecipe):
name = 'manyflavors'
version = '1.0'
clearBuildReqs()
def setup(r):
if Use.readline:
r.Create("/etc/readline", contents = "text 1")
if Use.ssl:
r.Create("/etc/ssl", contents = "text 1")
if not Use.ssl and not Use.readline:
r.Create("/etc/none", contents = "text 1")
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
manyFlavors2 = """\
class ManyFlavors(PackageRecipe):
name = 'manyflavors'
version = '2.0'
clearBuildReqs()
def setup(r):
if Use.readline:
r.Create("/etc/readline", contents = "text 1")
if Use.ssl:
r.Create("/etc/ssl", contents = "text 1")
if not Use.ssl and not Use.readline:
r.Create("/etc/none", contents = "text 1")
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
autoSource0 = """\
class AutoSource(PackageRecipe):
name = 'autosource'
version = '1.0'
clearBuildReqs()
def setup(r):
r.addSource('localfile')
"""
autoSource1 = """\
class AutoSource(PackageRecipe):
name = 'autosource'
version = '1.0'
clearBuildReqs()
def setup(r):
r.addSource('distcc-2.9.tar.bz2')
r.addSource('localfile')
"""
autoSource2 = """\
class AutoSource(PackageRecipe):
name = 'autosource'
version = '2.0'
clearBuildReqs()
def setup(r):
r.addSource('multilib-sample.tar.bz2')
r.addSource('localfile')
"""
autoSource3 = """\
class AutoSource(PackageRecipe):
name = 'autosource'
version = '3.0'
clearBuildReqs()
def setup(r):
r.addSource('multilib-sample.tar.bz2')
r.addSource('localfile')
r.Create('/foo')
"""
autoSource4 = """\
class AutoSource(PackageRecipe):
name = 'autosource'
version = '1.0'
clearBuildReqs()
def setup(r):
r.addSource('distcache-1.4.5.tar.bz2')
r.Create('/foo')
"""
autoSource5 = """\
class AutoSource(PackageRecipe):
name = 'autosource'
version = '1.0'
clearBuildReqs()
def setup(r):
r.addSource('distcache-1.4.5.tar.bz2', rpm='distcache-1.4.5-2.src.rpm')
r.Create('/bar')
"""
configFileGoesEmpty1 = """\
class Config(PackageRecipe):
name = 'config'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/config', contents='test 123')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
configFileGoesEmpty2 = """\
class Config(PackageRecipe):
name = 'config'
version = '2.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/config')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
testRemove1 = """\
class Remove(PackageRecipe):
name = 'remove'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/config')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
testRemove2 = """\
class Remove(PackageRecipe):
name = 'remove'
version = '2.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/blah')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
configFileBecomesSymlink1 = """\
class Config(PackageRecipe):
name = 'config'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/config', contents='test 123')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
configFileBecomesSymlink2 = """\
class Config(PackageRecipe):
name = 'config'
version = '2.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/foo', contents='test 234')
r.Symlink('foo', '/etc/config')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
symlinkBecomesFile1 = """\
class Test(PackageRecipe):
name = 'test'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/usr/share/man/man1/bar.1', contents='test 234')
r.Symlink('bar.1', '/usr/share/man/man1/foo.1')
"""
symlinkBecomesFile2 = """\
class Test(PackageRecipe):
name = 'test'
version = '2.0'
clearBuildReqs()
def setup(r):
r.Create('/usr/share/man/man1/foo.1', contents='test 123')
"""
branchedFileIdTest1 = """
class BranchedFileId(PackageRecipe):
name = 'branchedFileId'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/first', 'unchanged')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
branchedFileIdTest2 = """
class BranchedFileId(PackageRecipe):
name = 'branchedFileId'
version = '2.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/second', 'unchanged')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
pathIdTest1 = """
class PathIdTest(PackageRecipe):
name = 'PathIdTest'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create("/lib/1")
r.Create("/lib/first")
r.Create("/lib/non-utf8" + '\200')
r.NonUTF8Filenames(exceptions="/lib/non-utf8" + '\200')
"""
pathIdTest2 = """
class PathIdTest(PackageRecipe):
name = 'PathIdTest'
version = '2.0'
clearBuildReqs()
def setup(r):
r.Create("/lib/1")
r.Create("/lib/2")
"""
pathIdTest3 = """
class PathIdTest(PackageRecipe):
name = 'PathIdTest'
version = '3.0'
clearBuildReqs()
def setup(r):
r.Create("/lib/1")
r.Create("/lib/2")
r.Create("/lib/3")
"""
pathIdTest4 = """
class PathIdTest(PackageRecipe):
name = 'PathIdTest'
version = '4.0'
clearBuildReqs()
def setup(r):
r.Create("/lib/1")
r.Create("/lib/2")
r.Create("/lib/3")
r.Create("/lib/4")
"""
depsMultiVersionTest1 = """
class Foo(PackageRecipe):
name = 'foo'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('%(libdir)s/libfoo.so.1')
r.Provides('file', '%(libdir)s/libfoo.so.1')
"""
depsMultiVersionTest2 = """
class Foo(PackageRecipe):
name = 'foo'
version = '2.0'
clearBuildReqs()
def setup(r):
r.Create('%(libdir)s/libfoo.so.2')
r.Provides('file', '%(libdir)s/libfoo.so.2')
"""
depsMultiVersionUser1 = """
class Bar(PackageRecipe):
name = 'bar'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('%(bindir)s/bar', mode=0755)
r.Requires('%(libdir)s/libfoo.so.1' %r.macros, '%(bindir)s/bar')
"""
depsMultiVersionUser2 = """
class Baz(PackageRecipe):
name = 'baz'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('%(bindir)s/baz', mode=0755)
r.Requires('%(libdir)s/libfoo.so.2' %r.macros, '%(bindir)s/baz')
"""
testSuiteRecipe = """
class TestSuiteRecipe(PackageRecipe):
name = 'testcase'
version = '1'
clearBuildReqs()
def setup(r):
r.Run('mkdir test; echo -e \#\!/bin/notthere\\nhi > test/foo; chmod 755 test/foo')
r.TestSuite('test', autoBuildMakeDependencies=False)
r.Create('/etc/foo')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
dependencyGroup = """
class DependencyGroup(GroupRecipe):
name = 'group-test'
version = '1.0'
clearBuildRequires()
def setup(self):
self.Requires('other')
self.addTrove('test:runtime')
"""
initialContentsRecipe0 = """
class InitialContentsTest(PackageRecipe):
name = 'initcontents'
version = '0'
clearBuildReqs()
def setup(r):
r.Create('/foo', contents='initialtransientcontents')
r.Transient('/foo')
"""
initialContentsRecipe01 = """
class InitialContentsTest(PackageRecipe):
name = 'initcontents'
version = '0.1'
clearBuildReqs()
def setup(r):
r.Create('/foo', contents='initialregularcontents')
"""
initialContentsRecipe02 = """
class InitialContentsTest(PackageRecipe):
name = 'initcontents'
version = '0.1'
clearBuildReqs()
def setup(r):
r.Create('/foo', contents='initialconfigcontents')
r.Config('/foo')
r.ComponentSpec('runtime', '/foo')
"""
initialContentsRecipe1 = """
class InitialContentsTest(PackageRecipe):
name = 'initcontents'
version = '1'
clearBuildReqs()
def setup(r):
r.Create('/foo', contents='initialrecipecontents')
r.InitialContents('/foo')
"""
initialContentsRecipe2 = """
class InitialContentsTest(PackageRecipe):
name = 'initcontents'
version = '2'
clearBuildReqs()
def setup(r):
r.Create('/foo', contents='secondrecipecontents')
r.InitialContents('/foo')
"""
otherRecipe = """
class Other(PackageRecipe):
name = 'other'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/etc/other', contents='secondrecipecontents')
r.Requires('test:config', '/etc/other')
r.ComponentSpec('runtime', '%(sysconfdir)s/')
"""
testGroup1 = """
class TestGroup(GroupRecipe):
name = 'group-test'
version = '1.0'
clearBuildRequires()
def setup(self):
self.addTrove('test1', '1.0')
"""
testGroup2 = """
class TestGroup(GroupRecipe):
name = 'group-test'
version = '1.0'
clearBuildRequires()
def setup(self):
self.addTrove('test1', '1.1')
"""
testGroup3 = """
class TestGroup(GroupRecipe):
name = 'group-test'
version = '1.0'
clearBuildRequires()
checkPathConflicts = True
def setup(self):
self.startGroup('group-test2', checkPathConflicts=False, groupName='group-test')
self.addTrove('test1', '1.0')
self.startGroup('group-test3', groupName='group-test')
self.addTrove('test2', '1.0')
"""
userInfoRecipe = """
class UserMe(UserInfoRecipe):
name = 'info-%(user)s'
version = '1'
clearBuildReqs()
def setup(r):
r.User('%(user)s', %(uid)s)
"""
syncGroupRecipe1 = """
class SyncGroup(GroupRecipe):
name = 'group-sync'
version = '1'
imageGroup = False
clearBuildRequires()
def setup(r):
r.addTrove('synctrove', '1', byDefault=False)
"""
syncGroupRecipe2 = """
class SyncGroup(GroupRecipe):
name = 'group-sync'
version = '2'
imageGroup = False
clearBuildRequires()
def setup(r):
r.addTrove('synctrove', '2', byDefault=False)
"""
syncTroveRecipe1 = """
class SyncTrove(PackageRecipe):
name = 'synctrove'
version = '1'
clearBuildReqs()
def setup(r):
r.Create('/usr/share/foo1')
r.Create('%(debugsrcdir)s/%(name)s-%(version)s/foo1')
"""
syncTroveRecipe2 = """
class SyncTrove(PackageRecipe):
name = 'synctrove'
version = '2'
clearBuildReqs()
def setup(r):
r.Create('/usr/share/foo2')
r.Create('%(debugsrcdir)s/%(name)s-%(version)s/foo2')
"""
notByDefaultRecipe = """
class NotByDefault(PackageRecipe):
name = 'testcase'
version = '1.0'
clearBuildReqs()
def setup(r):
if Use.readline: pass
r.Create('/usr/share/foo2')
r.Create('%(debugsrcdir)s/%(name)s-%(version)s/foo2')
"""
sourceSuperClass1 = """
class SourceSuperClass(PackageRecipe):
name = 'superclass'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/usr/share/foo2')
"""
sourceSuperClass2 = """
class SourceSuperClass(PackageRecipe):
name = 'superclass'
version = '1.0'
clearBuildReqs()
def setup(r):
r.Create('/usr/share/foo2')
r.addSource('newsource')
r.Install('newsource', '/usr/share/foo3')
"""
sourceSubClass1 = """
loadRecipe('superclass')
class sourceSubClass(SourceSuperClass):
name = 'subclass'
version = '1.0'
clearBuildReqs()
"""
simpleRecipe = """
class SimpleRecipe(PackageRecipe):
name = 'simple'
version = '1'
clearBuildReqs()
def setup(r):
r.Create('/foo', contents='simple')
"""
basicSplitGroup = """
class splitGroup(GroupRecipe):
name = 'group-first'
version = '1.0'
checkPathConflicts = False
clearBuildRequires()
def setup(self):
self.add("test", "@rpl:linux")
self.createGroup('group-second')
self.createGroup('group-third')
self.add("test", "@rpl:linux",
groupName = ['group-second', 'group-third'])
# add group-second to group-first
self.addNewGroup('group-second')
"""
buildReqTest1 = """\
class BuildReqTest(PackageRecipe):
name = 'foo'
version = '1.0'
clearBuildReqs()
buildRequires = ['blah']
def setup(r):
r.addSource('distcc-2.9.tar.bz2')
"""
unknownFlagRecipe = """\
class BuildReqTest(PackageRecipe):
name = 'foo'
version = '1.0'
clearBuildReqs()
if Use.ffff:
pass
def setup(r):
if Use.ffff:
r.Create('/foo', contents='simple')
else:
r.Create('/bar', contents='simple')
"""
simpleFactory = """\
class SimpleFactory(Factory):
name = "factory-simple"
version = "1.0"
def getRecipeClass(self):
class Subclass(PackageRecipe):
internalAbstractBaseClass = True
name = "subclass"
version = "1.0"
return Subclass
"""
simpleFactoryWithSources = """\
class SimpleFactory(Factory):
name = "factory-simple"
version = "1.0"
def getRecipeClass(self):
clearBuildRequires()
f = self.openSourceFile('VERSION')
readVersion = f.read()[:-1]
f.close()
class RealRecipe(PackageRecipe):
name = self.packageName
version = readVersion
def setup(r):
if False:
# make sure FactoryException is available
raise FactoryException
r.Create("/foo", contents = readVersion + "\\n")
return RealRecipe
"""
|
sassoftware/conary
|
conary_test/recipes.py
|
Python
|
apache-2.0
| 40,164
|
# Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
from tensorforce import TensorforceError
from tensorforce.core import TensorDict, TensorSpec, TensorsSpec, tf_function, tf_util
from tensorforce.core.optimizers import UpdateModifier
from tensorforce.core.optimizers.solvers import solver_modules
class LinesearchStep(UpdateModifier):
"""
Line-search-step update modifier, which performs a line search on the update step returned by
the given optimizer to find a potentially superior smaller step size
(specification key: `linesearch_step`).
Args:
optimizer (specification): Optimizer configuration
(<span style="color:#C00000"><b>required</b></span>).
max_iterations (parameter, int >= 1): Maximum number of line search iterations
(<span style="color:#C00000"><b>required</b></span>).
backtracking_factor (parameter, 0.0 < float < 1.0): Line search backtracking factor
(<span style="color:#00C000"><b>default</b></span>: 0.75).
name (string): (<span style="color:#0000C0"><b>internal use</b></span>).
arguments_spec (specification): <span style="color:#0000C0"><b>internal use</b></span>.
"""
def __init__(
self, *, optimizer, max_iterations, backtracking_factor=0.75, name=None, arguments_spec=None
):
super().__init__(optimizer=optimizer, name=name, arguments_spec=arguments_spec)
self.line_search = self.submodule(
name='line_search', module='line_search', modules=solver_modules,
max_iterations=max_iterations, backtracking_factor=backtracking_factor
)
def initialize_given_variables(self, *, variables):
super().initialize_given_variables(variables=variables)
self.line_search.complete_initialize(
arguments_spec=self.arguments_spec, values_spec=self.variables_spec
)
@tf_function(num_args=1)
def step(self, *, arguments, variables, fn_loss, **kwargs):
loss_before = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_before,)):
deltas = self.optimizer.step(
arguments=arguments, variables=variables, fn_loss=fn_loss, **kwargs
)
with tf.control_dependencies(control_inputs=deltas):
def linesearch():
loss_after = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_after,)):
# Replace "/" with "_" to ensure TensorDict is flat
_deltas = TensorDict((
(var.name[:-2].replace('/', '_'), delta)
for var, delta in zip(variables, deltas)
))
# TODO: should be moved to initialize_given_variables, but fn_loss...
def evaluate_step(arguments, deltas):
assignments = list()
for variable, delta in zip(variables, deltas.values()):
assignments.append(variable.assign_add(delta=delta, read_value=False))
with tf.control_dependencies(control_inputs=assignments):
return fn_loss(**arguments.to_kwargs())
_deltas = self.line_search.solve(
arguments=arguments, x_init=_deltas, base_value=loss_before,
zero_value=loss_after, fn_x=evaluate_step
)
return tuple(_deltas.values())
num_nonzero = list()
for delta in deltas:
num_nonzero.append(tf.math.count_nonzero(input=delta))
num_nonzero = tf.math.add_n(inputs=num_nonzero)
return tf.cond(pred=(num_nonzero == 0), true_fn=(lambda: deltas), false_fn=linesearch)
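# Hedged reference sketch (not Tensorforce code): the backtracking rule that
# the line-search solver above applies, written as plain Python for clarity.
# All names here are illustrative; the real solver runs in-graph on
# TensorFlow tensors via self.line_search.solve().
def _backtracking_linesearch_sketch(loss_of, full_step, loss_before,
                                    max_iterations, backtracking_factor=0.75):
    """Geometrically shrink full_step until the loss improves."""
    scale = 1.0
    for _ in range(max_iterations):
        if loss_of(scale * full_step) < loss_before:
            break  # accepted: the scaled step improves on the previous loss
        scale *= backtracking_factor  # otherwise back off and retry
    return scale * full_step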
|
reinforceio/tensorforce
|
tensorforce/core/optimizers/linesearch_step.py
|
Python
|
apache-2.0
| 4,502
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-09-06 09:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hackerspace', '0009_verbal_subcategory'),
]
operations = [
migrations.RemoveField(
model_name='programmingquestion',
name='op1',
),
migrations.RemoveField(
model_name='programmingquestion',
name='op2',
),
migrations.RemoveField(
model_name='programmingquestion',
name='op3',
),
migrations.RemoveField(
model_name='programmingquestion',
name='op4',
),
migrations.AddField(
model_name='programmingquestion',
name='Output',
field=models.CharField(default='1', max_length=200),
preserve_default=False,
),
migrations.AddField(
model_name='programmingquestion',
name='TestCases',
field=models.CharField(default='2', max_length=200, verbose_name='Test Cases'),
preserve_default=False,
),
migrations.AddField(
model_name='quiz',
name='Answer',
field=models.CharField(default='3', max_length=200),
preserve_default=False,
),
migrations.AddField(
model_name='verbal',
name='Answer',
field=models.CharField(default='3', max_length=200),
preserve_default=False,
),
migrations.AlterField(
model_name='programmingquestion',
name='subCategory',
field=models.CharField(choices=[(1, 'Strings'), (2, 'Dynamic Programming'), (3, 'Arrays'), (4, 'Data Structures')], max_length=200),
),
migrations.AlterField(
model_name='test',
name='ProgrammingTagName',
field=models.CharField(choices=[(1, 'Strings'), (2, 'Dynamic Programming'), (3, 'Arrays'), (4, 'Data Structures')], max_length=200, verbose_name='Programming Tags'),
),
migrations.AlterField(
model_name='verbal',
name='subCategory',
field=models.CharField(choices=[(1, 'Comprehension'), (2, 'Error Identification')], max_length=200),
),
]
|
SJIT-Hackerspace/SJIT-CodingPortal
|
hackerspace/migrations/0010_auto_20160906_1442.py
|
Python
|
apache-2.0
| 2,386
|
from .base import Base
from .helper import select_item_by_user
from .actions import Actions
from .browser import Browser
__all__ = ['select_item_by_user', 'Base', 'Actions', 'Browser']
|
yegorshr/CDNetworksAPI
|
cdnetworks/__init__.py
|
Python
|
apache-2.0
| 186
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=W0613,E1101
from __future__ import division
import os
import sys
import time
import csv
import shutil
import threading
import errno
import tempfile
import collections
import re
from distutils.version import LooseVersion
try:
import pandas as pd
except ImportError:
pd = None
from wlauto import Instrument, Parameter, IterationResult
from wlauto.instrumentation import instrument_is_installed
from wlauto.exceptions import (InstrumentError, WorkerThreadError, ConfigError,
DeviceNotRespondingError, TimeoutError)
from wlauto.utils.types import boolean, numeric
from wlauto.utils.fps import (FpsProcessor, SurfaceFlingerFrame, GfxInfoFrame, GFXINFO_EXEMPT,
VSYNC_INTERVAL)
PAUSE_LATENCY = 20
EPSYLON = 0.0001
class FpsInstrument(Instrument):
name = 'fps'
description = """
Measures Frames Per Second (FPS) and associated metrics for a workload.
.. note:: This instrument depends on pandas Python library (which is not part of standard
WA dependencies), so you will need to install that first, before you can use it.
Android L and below use SurfaceFlinger to calculate the FPS data.
Android M and above use gfxinfo to calculate the FPS data.
SurfaceFlinger:
The view is specified by the workload as ``view`` attribute. This defaults
to ``'SurfaceView'`` for game workloads, and ``None`` for non-game
        workloads (as for them FPS measurement usually doesn't make sense).
Individual workloads may override this.
gfxinfo:
The view is specified by the workload as ``package`` attribute.
This is because gfxinfo already processes for all views in a package.
This instrument adds four metrics to the results:
:FPS: Frames Per Second. This is the frame rate of the workload.
:frame_count: The total number of frames rendered during the execution of
the workload.
        :janks: The number of "janks" that occurred during execution of the
workload. Janks are sudden shifts in frame rate. They result
in a "stuttery" UI. See http://jankfree.org/jank-busters-io
:not_at_vsync: The number of frames that did not render in a single
vsync cycle.
"""
supported_platforms = ['android']
parameters = [
Parameter('drop_threshold', kind=numeric, default=5,
description='Data points below this FPS will be dropped as they '
'do not constitute "real" gameplay. The assumption '
'being that while actually running, the FPS in the '
'game will not drop below X frames per second, '
'except on loading screens, menus, etc, which '
'should not contribute to FPS calculation. '),
Parameter('keep_raw', kind=boolean, default=False,
description='If set to ``True``, this will keep the raw dumpsys output '
                              'in the results directory (this is mainly used for debugging). '
'Note: frames.csv with collected frames data will always be '
'generated regardless of this setting.'),
Parameter('generate_csv', kind=boolean, default=True,
description='If set to ``True``, this will produce temporal fps data '
'in the results directory, in a file named fps.csv '
'Note: fps data will appear as discrete step-like values '
                              'in order to produce a more meaningful representation, '
'a rolling mean can be applied.'),
Parameter('crash_check', kind=boolean, default=True,
description="""
                  Specifies whether the instrument should check for crashed content by examining
                  frame data. If this is set, the ``execution_time`` instrument must also be installed.
                  The check is performed by using the measured FPS and execution time to estimate the expected
                  frame count and comparing that against the measured frame count. If the ratio of
                  measured/expected is too low, then it is assumed that the content has crashed part way
                  through the run. What is "too low" is determined by ``crash_threshold``.
.. note:: This is not 100\% fool-proof. If the crash occurs sufficiently close to
workload's termination, it may not be detected. If this is expected, the
threshold may be adjusted up to compensate.
"""),
Parameter('crash_threshold', kind=float, default=0.7,
description="""
                  Specifies the threshold used to decide whether a measured/expected frames ratio indicates
                  a content crash. E.g. a value of ``0.75`` means that if the measured frame count is more than
                  a quarter lower than expected, it will be treated as a content crash.
"""),
Parameter('dumpsys_period', kind=float, default=2, constraint=lambda x: x > 0,
description="""
Specifies the time period between calls to ``dumpsys SurfaceFlinger --latency`` in
seconds when collecting frame data. Using a lower value improves the granularity
of timings when recording actions that take a short time to complete. Note, this
will produce duplicate frame data in the raw dumpsys output, however, this is
filtered out in frames.csv. It may also affect the overall load on the system.
The default value of 2 seconds corresponds with the NUM_FRAME_RECORDS in
android/services/surfaceflinger/FrameTracker.h (as of the time of writing
currently 128) and a frame rate of 60 fps that is applicable to most devices.
"""),
Parameter('force_surfaceflinger', kind=boolean, default=False,
description="""
By default, the method to capture fps data is based on Android version.
If this is set to true, force the instrument to use the SurfaceFlinger method
regardless of its Android version.
"""),
]
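    # Illustrative numbers (not from the original source) for the crash check
    # described above: at 60 FPS over a 30 s run the expected count is
    # 60 * 30 = 1800 frames; measuring only 1100 gives a ratio of ~0.61,
    # below the default crash_threshold of 0.7, so the content would be
    # flagged as crashed.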
def __init__(self, device, **kwargs):
super(FpsInstrument, self).__init__(device, **kwargs)
self.collector = None
self.outfile = None
self.fps_outfile = None
self.is_enabled = True
self.fps_method = ''
def validate(self):
if not pd or LooseVersion(pd.__version__) < LooseVersion('0.13.1'):
message = ('fps instrument requires pandas Python package (version 0.13.1 or higher) to be installed.\n'
'You can install it with pip, e.g. "sudo pip install pandas"')
raise InstrumentError(message)
if self.crash_check and not instrument_is_installed('execution_time'):
raise ConfigError('execution_time instrument must be installed in order to check for content crash.')
def setup(self, context):
workload = context.workload
if hasattr(workload, 'view'):
self.fps_outfile = os.path.join(context.output_directory, 'fps.csv')
self.outfile = os.path.join(context.output_directory, 'frames.csv')
# Android M brings a new method of collecting FPS data
if not self.force_surfaceflinger and (self.device.get_sdk_version() >= 23):
# gfxinfo takes in the package name rather than a single view/activity
# so there is no 'list_command' to run and compare against a list of
# views/activities. Additionally, clearing the stats requires the package
# so we need to clear for every package in the workload.
# Usually there is only one package, but some workloads may run multiple
# packages so each one must be reset before continuing
self.fps_method = 'gfxinfo'
runcmd = 'dumpsys gfxinfo {} framestats'
lstcmd = None
params = workload.package
params = [params] if isinstance(params, basestring) else params
for pkg in params:
self.device.execute('dumpsys gfxinfo {} reset'.format(pkg))
else:
self.fps_method = 'surfaceflinger'
runcmd = 'dumpsys SurfaceFlinger --latency {}'
lstcmd = 'dumpsys SurfaceFlinger --list'
params = workload.view
self.device.execute('dumpsys SurfaceFlinger --latency-clear ')
self.collector = LatencyCollector(self.outfile, self.device, params or '',
self.keep_raw, self.logger, self.dumpsys_period,
runcmd, lstcmd, self.fps_method)
else:
self.logger.debug('Workload does not contain a view; disabling...')
self.is_enabled = False
def start(self, context):
if self.is_enabled:
self.logger.debug('Starting Frame Statistics collection...')
self.collector.start()
def stop(self, context):
if self.is_enabled and self.collector.is_alive():
self.logger.debug('Stopping Frame Statistics collection...')
self.collector.stop()
def update_result(self, context):
if self.is_enabled:
fps, frame_count, janks, not_at_vsync = float('nan'), 0, 0, 0
p90, p95, p99 = [float('nan')] * 3
data = pd.read_csv(self.outfile)
if not data.empty: # pylint: disable=maybe-no-member
# gfxinfo method has an additional file generated that contains statistics
stats_file = None
if self.fps_method == 'gfxinfo':
stats_file = os.path.join(os.path.dirname(self.outfile), 'gfxinfo.csv')
fp = FpsProcessor(data, extra_data=stats_file)
per_frame_fps, metrics = fp.process(self.collector.refresh_period, self.drop_threshold)
fps, frame_count, janks, not_at_vsync = metrics
if self.generate_csv:
per_frame_fps.to_csv(self.fps_outfile, index=False, header=True)
context.add_artifact('fps', path='fps.csv', kind='data')
p90, p95, p99 = fp.percentiles()
context.result.add_metric('FPS', fps)
context.result.add_metric('frame_count', frame_count)
context.result.add_metric('janks', janks, lower_is_better=True)
context.result.add_metric('not_at_vsync', not_at_vsync, lower_is_better=True)
context.result.add_metric('frame_time_90percentile', p90, 'ms', lower_is_better=True)
context.result.add_metric('frame_time_95percentile', p95, 'ms', lower_is_better=True)
context.result.add_metric('frame_time_99percentile', p99, 'ms', lower_is_better=True)
def slow_update_result(self, context):
result = context.result
if self.crash_check and result.has_metric('execution_time'):
self.logger.debug('Checking for crashed content.')
exec_time = result['execution_time'].value
fps = result['FPS'].value
frames = result['frame_count'].value
if all([exec_time, fps, frames]):
expected_frames = fps * exec_time
ratio = frames / expected_frames
self.logger.debug('actual/expected frames: {:.2}'.format(ratio))
if ratio < self.crash_threshold:
self.logger.error('Content for {} appears to have crashed.'.format(context.spec.label))
result.status = IterationResult.FAILED
result.add_event('Content crash detected (actual/expected frames: {:.2}).'.format(ratio))
class LatencyCollector(threading.Thread):
# Note: the size of the frames buffer for a particular surface is defined
# by NUM_FRAME_RECORDS inside android/services/surfaceflinger/FrameTracker.h.
# At the time of writing, this was hard-coded to 128. So at 60 fps
# (and there is no reason to go above that, as it matches vsync rate
# on pretty much all phones), there is just over 2 seconds' worth of
# frames in there. Hence the default sleep time of 2 seconds between dumps.
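    # The arithmetic behind that default: 128 records / 60 fps ~= 2.13 s of
    # history per surface, so dumping every 2 s avoids losing frames.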
def __init__(self, outfile, device, activities, keep_raw, logger, dumpsys_period,
run_command, list_command, fps_method):
super(LatencyCollector, self).__init__()
self.outfile = outfile
self.device = device
self.keep_raw = keep_raw
self.logger = logger
self.dumpsys_period = dumpsys_period
self.stop_signal = threading.Event()
self.frames = []
self.last_ready_time = 0
self.refresh_period = VSYNC_INTERVAL
self.drop_threshold = self.refresh_period * 1000
self.exc = None
self.unresponsive_count = 0
if isinstance(activities, basestring):
activities = [activities]
self.activities = activities
self.command_template = run_command
self.list_command = list_command
self.fps_method = fps_method
# Based on the fps_method, setup the header for the csv,
# and set the process_trace_line function accordingly
if fps_method == 'surfaceflinger':
self.header = SurfaceFlingerFrame._fields
self.process_trace_line = self._process_surfaceflinger_line
else:
self.header = GfxInfoFrame._fields
self.process_trace_line = self._process_gfxinfo_line
self.re_frame = re.compile('([0-9]+,)+')
self.re_stats = re.compile('.*(percentile|frames|Number).*')
# Create a template summary text block that matches what gfxinfo gives after a reset
# - 133 is the default ms value for percentiles after reset
self.summary = collections.OrderedDict((('Total frames rendered', 0),
('Janky frames', 0),
('90th percentile', 133),
('95th percentile', 133),
('99th percentile', 133),
('Number Missed Vsync', 0),
('Number High input latency', 0),
('Number Slow UI thread', 0),
('Number Slow bitmap uploads', 0),
('Number Slow issue draw commands', 0)))
def run(self):
try:
self.logger.debug('Frame Statistics collection started. Method: ' + self.fps_method)
self.stop_signal.clear()
fd, temp_file = tempfile.mkstemp()
self.logger.debug('temp file: {}'.format(temp_file))
wfh = os.fdopen(fd, 'wb')
try:
view_list = self.activities
while not self.stop_signal.is_set():
# If a list_command is provided, set the view_list to be its output
# Then check for each activity in this list and if there is a match,
# process the output. If no command is provided, then always process.
if self.list_command:
view_list = self.device.execute(self.list_command).split()
for activity in self.activities:
if activity in view_list:
wfh.write(self.device.execute(self.command_template.format(activity)))
time.sleep(self.dumpsys_period)
finally:
wfh.close()
# TODO: this can happen after the run during results processing
with open(temp_file) as fh:
text = fh.read().replace('\r\n', '\n').replace('\r', '\n')
for line in text.split('\n'):
line = line.strip()
if line:
self.process_trace_line(line)
if self.keep_raw:
raw_file = os.path.join(os.path.dirname(self.outfile), self.fps_method + '.raw')
shutil.copy(temp_file, raw_file)
os.unlink(temp_file)
except (DeviceNotRespondingError, TimeoutError): # pylint: disable=W0703
raise
except Exception, e: # pylint: disable=W0703
self.logger.warning('Exception on collector thread: {}({})'.format(e.__class__.__name__, e))
self.exc = WorkerThreadError(self.name, sys.exc_info())
self.logger.debug('Frame Statistics collection stopped.')
with open(self.outfile, 'w') as wfh:
writer = csv.writer(wfh)
writer.writerow(self.header)
writer.writerows(self.frames)
self.logger.debug('Frames data written.')
# gfxinfo outputs its own summary statistics for the run.
# No point calculating those from the raw data, so store in its own file for later use.
if self.fps_method == 'gfxinfo':
stats_file = os.path.join(os.path.dirname(self.outfile), 'gfxinfo.csv')
with open(stats_file, 'w') as wfh:
writer = csv.writer(wfh)
writer.writerows(zip(self.summary.keys(), self.summary.values()))
self.logger.debug('Gfxinfo summary data written.')
def stop(self):
self.stop_signal.set()
self.join()
if self.unresponsive_count:
            message = 'LatencyCollector was unresponsive {} times.'.format(self.unresponsive_count)
if self.unresponsive_count > 10:
self.logger.warning(message)
else:
self.logger.debug(message)
if self.exc:
raise self.exc # pylint: disable=E0702
self.logger.debug('Frame Statistics complete.')
def _process_surfaceflinger_line(self, line):
parts = line.split()
if len(parts) == 3:
frame = SurfaceFlingerFrame(*map(int, parts))
if frame.frame_ready_time <= self.last_ready_time:
return # duplicate frame
if (frame.frame_ready_time - frame.desired_present_time) > self.drop_threshold:
self.logger.debug('Dropping bogus frame {}.'.format(line))
return # bogus data
self.last_ready_time = frame.frame_ready_time
self.frames.append(frame)
elif len(parts) == 1:
self.refresh_period = int(parts[0])
self.drop_threshold = self.refresh_period * 1000
elif 'SurfaceFlinger appears to be unresponsive, dumping anyways' in line:
self.unresponsive_count += 1
else:
self.logger.warning('Unexpected SurfaceFlinger dump output: {}'.format(line))
def _process_gfxinfo_line(self, line):
if 'No process found for' in line:
self.unresponsive_count += 1
return
# Process lines related to the frame data
match = self.re_frame.match(line)
if match:
data = match.group(0)[:-1]
data = map(int, data.split(','))
frame = GfxInfoFrame(*data)
if frame not in self.frames:
if frame.Flags & GFXINFO_EXEMPT:
self.logger.debug('Dropping exempt frame {}.'.format(line))
else:
self.frames.append(frame)
return
# Process lines related to the summary statistics
match = self.re_stats.match(line)
if match:
data = match.group(0)
title, value = data.split(':', 1)
title = title.strip()
value = value.strip()
if title in self.summary:
if 'ms' in value:
value = value.strip('ms')
if '%' in value:
value = value.split()[0]
self.summary[title] = int(value)
|
Sticklyman1936/workload-automation
|
wlauto/instrumentation/fps/__init__.py
|
Python
|
apache-2.0
| 21,087
|
_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py'
model = dict(
backbone=dict(
type='ResNeXt',
depth=101,
groups=32,
base_width=8,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=False),
style='pytorch',
init_cfg=dict(
type='Pretrained',
checkpoint='open-mmlab://detectron2/resnext101_32x8d')))
# ResNeXt-101-32x8d model trained with Caffe2 at FB,
# so the mean and std need to be changed.
img_norm_cfg = dict(
mean=[103.530, 116.280, 123.675],
std=[57.375, 57.120, 58.395],
to_rgb=False)
# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)],
# multiscale_mode='range'
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
dict(
type='Resize',
img_scale=[(1333, 640), (1333, 800)],
multiscale_mode='range',
keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
train=dict(dataset=dict(pipeline=train_pipeline)),
val=dict(pipeline=test_pipeline),
test=dict(pipeline=test_pipeline))
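# With a standard MMDetection checkout, a config like this is typically
# launched via the bundled training script, e.g.
#   python tools/train.py configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py
# (invocation shown for illustration; adjust paths to your checkout).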
|
open-mmlab/mmdetection
|
configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py
|
Python
|
apache-2.0
| 1,878
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'maxim'
import os
# noinspection PyUnresolvedReferences
from models import *
from util import *
class ModelInfo(object):
def __init__(self, path, model_class, model_params, run_params):
self.path = path
self.model_class = model_class
self.model_params = model_params
self.run_params = run_params
def is_available(self):
return self.model_class is not None
def __repr__(self):
return repr({'path': self.path, 'class': self.model_class})
def get_model_info(path, strict=True):
model_params = _read_dict(os.path.join(path, 'model-params.txt'))
run_params = _read_dict(os.path.join(path, 'run-params.txt'))
model_class = run_params['model_class']
    resolved_class = globals().get(model_class)  # None if the class is not defined
if strict and resolved_class is None:
raise ModelNotAvailable(model_class)
return ModelInfo(path, resolved_class, model_params, run_params)
def _read_dict(path):
with open(path, 'r') as file_:
content = file_.read()
return str_to_obj(content)
class ModelNotAvailable(BaseException):
def __init__(self, model_class, *args):
super(ModelNotAvailable, self).__init__(*args)
self.model_class = model_class
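# Illustrative usage only: 'runs/my-model' is a hypothetical directory that
# must contain model-params.txt and run-params.txt as read above, and the
# constructor call assumes model_params match the model class signature.
#
#   info = get_model_info('runs/my-model')
#   if info.is_available():
#       model = info.model_class(**info.model_params)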
|
maxim5/time-series-machine-learning
|
predict/model_io.py
|
Python
|
apache-2.0
| 1,222
|
'''
Created on Apr 30, 2012
@author: h87966
'''
from unit5.blog_datastore_memory import BlogMemoryDataStore
from unit5.blog_datastore_appengine import BlogAppengineDataStore
class BlogDataStoreFactory():
    '''
    Factory that selects a blog datastore implementation by name.
    '''
storage_implementations = {'memory':BlogMemoryDataStore(),
'appengine':BlogAppengineDataStore()}
def __init__(self, storage_impl='appengine'):
'''
Constructor
'''
self.storage = self.storage_implementations[storage_impl]
def set_storage(self, blog_storage):
self.storage = blog_storage
def get_storage(self):
return self.storage
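# Illustrative usage (not part of the original source):
#
#   factory = BlogDataStoreFactory(storage_impl='memory')
#   store = factory.get_storage()  # BlogMemoryDataStore instance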
|
cdoremus/udacity-python_web_development-cs253
|
src/unit5/blog_datastore_factory.py
|
Python
|
apache-2.0
| 683
|
"""ACME protocol messages."""
import collections
import six
from acme import challenges
from acme import errors
from acme import fields
from acme import jose
from acme import util
OLD_ERROR_PREFIX = "urn:acme:error:"
ERROR_PREFIX = "urn:ietf:params:acme:error:"
ERROR_CODES = {
'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
'badNonce': 'The client sent an unacceptable anti-replay nonce',
'connection': ('The server could not connect to the client to verify the'
' domain'),
'dnssec': 'The server could not validate a DNSSEC signed domain',
# deprecate invalidEmail
'invalidEmail': 'The provided email for a registration was invalid',
'invalidContact': 'The provided contact URI was invalid',
'malformed': 'The request message was malformed',
'rateLimited': 'There were too many requests of a given type',
'serverInternal': 'The server experienced an internal error',
'tls': 'The server experienced a TLS error during domain verification',
'unauthorized': 'The client lacks sufficient authorization',
'unknownHost': 'The server could not resolve a domain name',
}
ERROR_TYPE_DESCRIPTIONS = dict(
(ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())
ERROR_TYPE_DESCRIPTIONS.update(dict( # add errors with old prefix, deprecate me
(OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))
def is_acme_error(err):
"""Check if argument is an ACME error."""
if isinstance(err, Error) and (err.typ is not None):
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
else:
return False
@six.python_2_unicode_compatible
class Error(jose.JSONObjectWithFields, errors.Error):
"""ACME error.
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00
:ivar unicode typ:
:ivar unicode title:
:ivar unicode detail:
"""
typ = jose.Field('type', omitempty=True, default='about:blank')
title = jose.Field('title', omitempty=True)
detail = jose.Field('detail', omitempty=True)
@classmethod
def with_code(cls, code, **kwargs):
"""Create an Error instance with an ACME Error code.
:unicode code: An ACME error code, like 'dnssec'.
:kwargs: kwargs to pass to Error.
"""
if code not in ERROR_CODES:
raise ValueError("The supplied code: %s is not a known ACME error"
" code" % code)
typ = ERROR_PREFIX + code
return cls(typ=typ, **kwargs)
@property
def description(self):
"""Hardcoded error description based on its type.
:returns: Description if standard ACME error or ``None``.
:rtype: unicode
"""
return ERROR_TYPE_DESCRIPTIONS.get(self.typ)
@property
def code(self):
"""ACME error code.
Basically self.typ without the ERROR_PREFIX.
:returns: error code if standard ACME code or ``None``.
:rtype: unicode
"""
code = str(self.typ).split(':')[-1]
if code in ERROR_CODES:
return code
def __str__(self):
return b' :: '.join(
part.encode('ascii', 'backslashreplace') for part in
(self.typ, self.description, self.detail, self.title)
if part is not None).decode()
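# A hedged usage sketch based only on the definitions in this module:
#
#   err = Error.with_code('badNonce', detail='stale value')
#   assert err.typ == 'urn:ietf:params:acme:error:badNonce'
#   assert err.description == ERROR_CODES['badNonce']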
class _Constant(jose.JSONDeSerializable, collections.Hashable): # type: ignore
"""ACME constant."""
__slots__ = ('name',)
POSSIBLE_NAMES = NotImplemented
def __init__(self, name):
self.POSSIBLE_NAMES[name] = self
self.name = name
def to_partial_json(self):
return self.name
@classmethod
def from_json(cls, value):
if value not in cls.POSSIBLE_NAMES:
raise jose.DeserializationError(
'{0} not recognized'.format(cls.__name__))
return cls.POSSIBLE_NAMES[value]
def __repr__(self):
return '{0}({1})'.format(self.__class__.__name__, self.name)
def __eq__(self, other):
return isinstance(other, type(self)) and other.name == self.name
def __hash__(self):
return hash((self.__class__, self.name))
def __ne__(self, other):
return not self == other
class Status(_Constant):
"""ACME "status" field."""
POSSIBLE_NAMES = {} # type: dict
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
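# Because _Constant.__init__ registers every instance in POSSIBLE_NAMES,
# deserialization hands back the shared singleton, e.g.
# Status.from_json('pending') is STATUS_PENDING.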
class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES = {} # type: dict
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
class Identifier(jose.JSONObjectWithFields):
"""ACME identifier.
:ivar IdentifierType typ:
:ivar unicode value:
"""
typ = jose.Field('type', decoder=IdentifierType.from_json)
value = jose.Field('value')
class Directory(jose.JSONDeSerializable):
"""Directory."""
_REGISTERED_TYPES = {} # type: dict
class Meta(jose.JSONObjectWithFields):
"""Directory Meta."""
terms_of_service = jose.Field('terms-of-service', omitempty=True)
website = jose.Field('website', omitempty=True)
caa_identities = jose.Field('caa-identities', omitempty=True)
@classmethod
def _canon_key(cls, key):
return getattr(key, 'resource_type', key)
@classmethod
def register(cls, resource_body_cls):
"""Register resource."""
resource_type = resource_body_cls.resource_type
assert resource_type not in cls._REGISTERED_TYPES
cls._REGISTERED_TYPES[resource_type] = resource_body_cls
return resource_body_cls
def __init__(self, jobj):
canon_jobj = util.map_keys(jobj, self._canon_key)
# TODO: check that everything is an absolute URL; acme-spec is
# not clear on that
self._jobj = canon_jobj
def __getattr__(self, name):
try:
return self[name.replace('_', '-')]
except KeyError as error:
raise AttributeError(str(error) + ': ' + name)
def __getitem__(self, name):
try:
return self._jobj[self._canon_key(name)]
except KeyError:
raise KeyError('Directory field not found')
def to_partial_json(self):
return self._jobj
@classmethod
def from_json(cls, jobj):
jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
return cls(jobj)
class Resource(jose.JSONObjectWithFields):
"""ACME Resource.
:ivar acme.messages.ResourceBody body: Resource body.
"""
body = jose.Field('body')
class ResourceWithURI(Resource):
"""ACME Resource with URI.
:ivar unicode uri: Location of the resource.
"""
uri = jose.Field('uri') # no ChallengeResource.uri
class ResourceBody(jose.JSONObjectWithFields):
"""ACME Resource Body."""
class Registration(ResourceBody):
"""Registration Resource Body.
:ivar acme.jose.jwk.JWK key: Public key.
:ivar tuple contact: Contact information following ACME spec,
`tuple` of `unicode`.
:ivar unicode agreement:
"""
# on new-reg key server ignores 'key' and populates it based on
# JWS.signature.combined.jwk
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
contact = jose.Field('contact', omitempty=True, default=())
agreement = jose.Field('agreement', omitempty=True)
status = jose.Field('status', omitempty=True)
phone_prefix = 'tel:'
email_prefix = 'mailto:'
@classmethod
def from_data(cls, phone=None, email=None, **kwargs):
"""Create registration resource from contact details."""
details = list(kwargs.pop('contact', ()))
if phone is not None:
details.append(cls.phone_prefix + phone)
if email is not None:
details.append(cls.email_prefix + email)
kwargs['contact'] = tuple(details)
return cls(**kwargs)
def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact
if detail.startswith(prefix))
@property
def phones(self):
"""All phones found in the ``contact`` field."""
return self._filter_contact(self.phone_prefix)
@property
def emails(self):
"""All emails found in the ``contact`` field."""
return self._filter_contact(self.email_prefix)
@Directory.register
class NewRegistration(Registration):
"""New registration."""
resource_type = 'new-reg'
resource = fields.Resource(resource_type)
class UpdateRegistration(Registration):
"""Update registration."""
resource_type = 'reg'
resource = fields.Resource(resource_type)
class RegistrationResource(ResourceWithURI):
"""Registration Resource.
:ivar acme.messages.Registration body:
:ivar unicode new_authzr_uri: Deprecated. Do not use.
:ivar unicode terms_of_service: URL for the CA TOS.
"""
body = jose.Field('body', decoder=Registration.from_json)
new_authzr_uri = jose.Field('new_authzr_uri', omitempty=True)
terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
"""Challenge Resource Body.
.. todo::
Confusingly, this has a similar name to `.challenges.Challenge`,
as well as `.achallenges.AnnotatedChallenge`. Please use names
such as ``challb`` to distinguish instances of this class from
``achall``.
:ivar acme.challenges.Challenge: Wrapped challenge.
Conveniently, all challenge fields are proxied, i.e. you can
call ``challb.x`` to get ``challb.chall.x`` contents.
:ivar acme.messages.Status status:
:ivar datetime.datetime validated:
:ivar messages.Error error:
"""
__slots__ = ('chall',)
uri = jose.Field('uri')
status = jose.Field('status', decoder=Status.from_json,
omitempty=True, default=STATUS_PENDING)
validated = fields.RFC3339Field('validated', omitempty=True)
error = jose.Field('error', decoder=Error.from_json,
omitempty=True, default=None)
def to_partial_json(self):
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj
@classmethod
def fields_from_json(cls, jobj):
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields
def __getattr__(self, name):
return getattr(self.chall, name)
class ChallengeResource(Resource):
"""Challenge Resource.
:ivar acme.messages.ChallengeBody body:
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.
"""
body = jose.Field('body', decoder=ChallengeBody.from_json)
authzr_uri = jose.Field('authzr_uri')
@property
def uri(self): # pylint: disable=missing-docstring,no-self-argument
# bug? 'method already defined line None'
# pylint: disable=function-redefined
return self.body.uri # pylint: disable=no-member
class Authorization(ResourceBody):
"""Authorization Resource Body.
:ivar acme.messages.Identifier identifier:
:ivar list challenges: `list` of `.ChallengeBody`
:ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
of `int`, as opposed to `list` of `list` from the spec).
:ivar acme.messages.Status status:
:ivar datetime.datetime expires:
"""
identifier = jose.Field('identifier', decoder=Identifier.from_json)
challenges = jose.Field('challenges', omitempty=True)
combinations = jose.Field('combinations', omitempty=True)
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
# TODO: 'expires' is allowed for Authorization Resources in
# general, but for Key Authorization '[t]he "expires" field MUST
# be absent'... then acme-spec gives example with 'expires'
# present... That's confusing!
expires = fields.RFC3339Field('expires', omitempty=True)
@challenges.decoder
def challenges(value): # pylint: disable=missing-docstring,no-self-argument
return tuple(ChallengeBody.from_json(chall) for chall in value)
@property
def resolved_combinations(self):
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations)
@Directory.register
class NewAuthorization(Authorization):
"""New authorization."""
resource_type = 'new-authz'
resource = fields.Resource(resource_type)
class AuthorizationResource(ResourceWithURI):
"""Authorization Resource.
:ivar acme.messages.Authorization body:
:ivar unicode new_cert_uri: Deprecated. Do not use.
"""
body = jose.Field('body', decoder=Authorization.from_json)
new_cert_uri = jose.Field('new_cert_uri', omitempty=True)
@Directory.register
class CertificateRequest(jose.JSONObjectWithFields):
"""ACME new-cert request.
:ivar acme.jose.util.ComparableX509 csr:
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
resource_type = 'new-cert'
resource = fields.Resource(resource_type)
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
class CertificateResource(ResourceWithURI):
"""Certificate Resource.
:ivar acme.jose.util.ComparableX509 body:
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
"""
cert_chain_uri = jose.Field('cert_chain_uri')
authzrs = jose.Field('authzrs')
@Directory.register
class Revocation(jose.JSONObjectWithFields):
"""Revocation message.
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
"""
resource_type = 'revoke-cert'
resource = fields.Resource(resource_type)
certificate = jose.Field(
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
reason = jose.Field('reason')
|
jsha/letsencrypt
|
acme/acme/messages.py
|
Python
|
apache-2.0
| 14,264
|
# Copyright 2015, A10 Networks
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from a10_horizon.dashboard.a10networks.a10appliances import views
urlpatterns = patterns(
'a10_horizon.dashboard.a10networks.a10appliances.views',
url(r'^$', views.IndexView.as_view(), name='index')
# url(r'^deleteappliance$', views.DeleteApplianceView.as_view(), name='deleteappliance')
# url(r'^addimage$', views.AddImageView.as_view(), name="addimage")
)
|
a10networks/a10-horizon
|
a10_horizon/dashboard/a10networks/a10appliances/urls.py
|
Python
|
apache-2.0
| 1,056
|
import os
import random
import time
import json
from locust import HttpLocust, TaskSet, task
from lib.baseTaskSet import baseTaskSet
# TODO - make these config-driven
from lib.openstack.keystone import get_auth_token
from lib.openstack.nova import list_servers
from lib.openstack.nova import list_servers_detail
from lib.openstack.nova import list_server_detail
from lib.openstack.nova import create_server
from lib.openstack.nova import delete_server
from lib.openstack.nova import reboot_server
from lib.openstack.nova import resize_server
from lib.openstack.nova import confirm_resize_server
from lib.openstack.nova import revert_resize_server
from lib.openstack.nova import list_limits
from lib.openstack.nova import nova_get_server_id
class UserBehavior(baseTaskSet):
def on_start(self):
super(UserBehavior, self).on_start()
self.server_count = 0
self.min_server_count = 7
self.max_server_count = 10
self.auth_token, self.tenant_id, self.service_catalog = get_auth_token(self)
@task(2)
def nova_create_server(self):
flavor_id = random.choice([42,84])
response = create_server(self,
flavor_id=flavor_id,
name="server-%s-%s" % (self.id, self.server_count))
server_id = json.loads(response.content)['server']['id']
self.server_count += 1
time.sleep(random.choice([1,1,3,3,3,5,5,5,5,5,5,10,10,10,10,25]))
self.nova_resize_server()
self.output("server id: %s" % server_id)
@task(5)
def nova_resize_server(self):
server_id = nova_get_server_id(self)
flavor_id = random.choice([42,84,
9999, 9999, 9999, 9999,
9998, 9998, 9998, 9998,
451, 451, 451])
self.output("Resize server | %s | %s " % (server_id, flavor_id))
if server_id:
resize_server(self, server_id, flavor_id)
time.sleep(random.choice([5,9,9,9,9,10,10,10,10,10,10,10,10,15,15,15,25,25,25,25]))
            choices = [1,1,1,1,1,2,2]
            # five of the seven entries are odd, so confirm ~5/7 of the time
            if random.choice(choices) % 2 != 0:
self.output("RESIZE YUSSSS!")
confirm_resize_server(self, server_id)
else:
revert_resize_server(self,server_id)
else:
pass
@task(1)
def nova_confirm_resize_server(self):
server_id = nova_get_server_id(self)
confirm_resize_server(self, server_id)
@task(1)
def nova_revert_resize_server(self):
server_id = nova_get_server_id(self)
revert_resize_server(self, server_id)
@task(2)
def nova_reboot_server(self):
server_id = nova_get_server_id(self)
reboot_server(self, server_id)
time.sleep(random.choice([1,1,1,1,3,3,3,5,10,25]))
#@task(1)
def nova_delete_server(self):
server_id = nova_get_server_id(self)
delete_server(self, server_id)
@task(3)
def nova_list_servers(self):
self.output("LIST_SERVERS")
response = list_servers(self)
@task(3)
def check_server_pool(self):
response = list_servers(self)
servers = json.loads(response.content)['servers']
if len(servers) < self.min_server_count:
self.nova_create_server()
elif len(servers) == self.max_server_count:
self.nova_delete_server()
@task(4)
def nova_list_servers_detail(self):
self.output("LIST_SERVERS_DETAIL")
list_servers_detail(self)
@task(4)
def nova_list_limits(self):
list_limits(self)
@task(3)
def keystone_auth_token(self):
self.auth_token, self.tenant_id, self.service_catalog = get_auth_token(self)
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait=500
max_wait=5000
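# A file like this is typically driven by the Locust CLI, e.g.
#   locust -f server_exp2.py --host=http://<keystone-endpoint>
# (the host value and endpoint layout are deployment-specific assumptions).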
|
pcrews/rannsaka
|
test_files/server_exp2.py
|
Python
|
apache-2.0
| 3,914
|
"""Base class for IKEA TRADFRI."""
from __future__ import annotations
from collections.abc import Callable
from functools import wraps
import logging
from typing import Any
from pytradfri.command import Command
from pytradfri.device import Device
from pytradfri.device.air_purifier import AirPurifier
from pytradfri.device.air_purifier_control import AirPurifierControl
from pytradfri.device.blind import Blind
from pytradfri.device.blind_control import BlindControl
from pytradfri.device.light import Light
from pytradfri.device.light_control import LightControl
from pytradfri.device.signal_repeater_control import SignalRepeaterControl
from pytradfri.device.socket import Socket
from pytradfri.device.socket_control import SocketControl
from pytradfri.error import PytradfriError
from homeassistant.core import callback
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
def handle_error(
func: Callable[[Command | list[Command]], Any]
) -> Callable[[str], Any]:
"""Handle tradfri api call error."""
@wraps(func)
async def wrapper(command: Command | list[Command]) -> None:
"""Decorate api call."""
try:
await func(command)
except PytradfriError as err:
_LOGGER.error("Unable to execute command %s: %s", command, err)
return wrapper
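# Illustrative only -- because the wrapper above logs and swallows
# PytradfriError, callers can await the wrapped api without their own
# try/except (raw_api is a hypothetical coroutine):
#
#   api = handle_error(raw_api)
#   await api(command)  # failures are logged, not raised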
class TradfriBaseClass(Entity):
"""Base class for IKEA TRADFRI.
All devices and groups should ultimately inherit from this class.
"""
_attr_should_poll = False
def __init__(
self,
device: Device,
api: Callable[[Command | list[Command]], Any],
gateway_id: str,
) -> None:
"""Initialize a device."""
self._api = handle_error(api)
self._device: Device = device
self._device_control: BlindControl | LightControl | SocketControl | SignalRepeaterControl | AirPurifierControl | None = (
None
)
self._device_data: Socket | Light | Blind | AirPurifier | None = None
self._gateway_id = gateway_id
self._refresh(device)
@callback
def _async_start_observe(self, exc: Exception | None = None) -> None:
"""Start observation of device."""
if exc:
self.async_write_ha_state()
_LOGGER.warning("Observation failed for %s", self._attr_name, exc_info=exc)
try:
cmd = self._device.observe(
callback=self._observe_update,
err_callback=self._async_start_observe,
duration=0,
)
self.hass.async_create_task(self._api(cmd))
except PytradfriError as err:
_LOGGER.warning("Observation failed, trying again", exc_info=err)
self._async_start_observe()
async def async_added_to_hass(self) -> None:
"""Start thread when added to hass."""
self._async_start_observe()
@callback
def _observe_update(self, device: Device) -> None:
"""Receive new state data for this device."""
self._refresh(device)
self.async_write_ha_state()
def _refresh(self, device: Device) -> None:
"""Refresh the device data."""
self._device = device
self._attr_name = device.name
class TradfriBaseDevice(TradfriBaseClass):
"""Base class for a TRADFRI device.
All devices should inherit from this class.
"""
@property
def device_info(self) -> DeviceInfo:
"""Return the device info."""
info = self._device.device_info
return DeviceInfo(
identifiers={(DOMAIN, self._device.id)},
manufacturer=info.manufacturer,
model=info.model_number,
name=self._attr_name,
sw_version=info.firmware_version,
via_device=(DOMAIN, self._gateway_id),
)
def _refresh(self, device: Device) -> None:
"""Refresh the device data."""
super()._refresh(device)
self._attr_available = device.reachable
|
aronsky/home-assistant
|
homeassistant/components/tradfri/base_class.py
|
Python
|
apache-2.0
| 4,045
|
import urllib
import twython
def Crowd_twitter(query):
consumer_key = '*****';
consumer_secret = '*****';
access_token = '******';
access_token_secret = '******';
client_args = {'proxies': {'https': 'http://10.93.0.37:3333'}}
t = twython.Twython(app_key=consumer_key,
app_secret=consumer_secret,
oauth_token=access_token,
oauth_token_secret=access_token_secret,
client_args = client_args)
# query=raw_input("What do you want to search for?");
# query.replace(" ","+");
    output = t.search(q=query, result_type='popular', count=10)  # purposely restricted to 10 results to avoid spamming the Twitter server, which could get our server blacklisted
#print output;
aggregater = []
for i in range(10):
aggregater.append(output[u'statuses'][i][u'text']);
happy = open("positive-words.txt",'r')
sad = open("negative-words.txt",'r')
ha = happy.readlines()
sa = sad.readlines()
happy.close()
sad.close()
for i in range(len(ha)):
ha[i]=ha[i].rstrip()
for i in range(len(sa)):
sa[i]=sa[i].rstrip()
#Put basic sentiment analysis on tweet
posi = 0;
negi = 0;
for i in range(10):
for j in range(len(ha)):
if(ha[j] in aggregater[i]):
posi += 1;
for j in range(len(sa)):
if(sa[j] in aggregater[i]):
negi += 1;
#print "<!DOCTYPE html>\n<html>\n<title>Crowd likes!</title>"
if posi > negi:
return "<h1>CROWD LOVES IT!!:-)</h1>"
elif posi<negi:
return "<h1>CROWD DOESN'T LIKE IT!! :-( </h1>"
else:
return "<h1>CROWD CAN'T DECIDE :-| !!</h1>"
def buildwebpage(product_fk,product_cr,product_am,product_eb,search_query):
# return images,links,names,prices
print "<!DOCTYPE html>\n<html>";
print "\n<h1><em><ul>WELCOME TO DEALERSITE - ONE STOP FOR ALL YOUR SHOPPING</ul></em></h1>\n<body>"
print "<h1>THIS IS WHAT THE CROWD THINKS OF "+search_query+":</h1>"
print Crowd_twitter(search_query)
print "\n<h1>AMAZON</h1>";
for i in range(3):
print "\n<h2>"+product_am[2][i]+"</h2>"
print "<img border=\"0\" src=\""+product_am[0][i]+"\" alt=\"Amazon\">"
print "<a href=\""+product_am[1][i]+"\">CLICK THIS TO TAKE YOU TO AMAZONS PAGE TO BUY THE PRODUCT</a>"
print "\n<p>PRICE : Rs."+product_am[3][i]+"</p>";
print "\n<h1>EBAY</h1>";
for i in range(3):
print "\n<h2>"+product_eb[2][i]+"</h2>"
print "<img border=\"0\" src=\""+product_eb[0][i]+"\" alt=\"EBay\">"
print "<a href=\""+product_eb[1][i]+"\">CLICK THIS TO TAKE YOU TO EBAYS PAGE TO BUY THE PRODUCT</a>"
print "\n<p>PRICE : Rs."+product_eb[3][i]+"</p>";
print "\n<h1>FLIPKART</h1>";
for i in range(3):
print "\n<h2>"+product_fk[2][i]+"</h2>"
print "<img border=\"0\" src=\""+product_fk[0][i]+"\" alt=\"Flipkart\">"
print "<a href=\""+product_fk[1][i]+"\">CLICK THIS TO TAKE YOU TO FLIPKARTS PAGE TO BUY THE PRODUCT</a>"
print "\n<p>PRICE : Rs."+product_fk[3][i]+"</p>";
print "\n<h1>CROMA RETAIL</h1>";
for i in range(3):
print "\n<h2>"+product_cr[2][i]+"</h2>"
print "<img border=\"0\" src=\""+product_cr[0][i]+"\" alt=\"CROMA\">"
print "<a href=\""+product_cr[1][i]+"\">CLICK THIS TO TAKE YOU TO CROMA PAGE TO BUY THE PRODUCT</a>"
print "\n<p>PRICE : "+product_cr[3][i]+"</p>";
print "<a href=\"/comparison.html\"><em><b>CLICK HERE FOR A COMPARISON OF DIFFERENT BRANDS</b></em></a>"
# print "<a href=\"/crowd.html\">CLICK HERE FOR WHAT THE CROWD THINKS OF THE PRODUCT</a>"
print "</body>\n</html>"
def link_fk_actu(product_image):
Flipkart_query = "http://www.flipkart.com/all-categories/pr?p%5B%5D=sort%3Drelevance&sid=search.flipkart.com&q=";
# print "\n\n\n\n\nLINK FK ACTUAL";
# print product_image;
names = [];
for i in range(3):
ind = product_image[i].index("data-pid=")+len("data-pid=\"");
indend = product_image[i].index("data-tracking-products",ind) - 2;
names.append(Flipkart_query + product_image[i][ind:indend]);
return names;
def price_fk(product_image):
# print "\n\n\n\n\nPRICE FK";
# print product_image;
names = [];
for i in range(3):
indend = product_image[i].index(";;");
ind = product_image[i].rfind(";",0,indend-1);
names.append(product_image[i][ind+1:indend]);
return names;
def name_fk(product_image):
# print "\n\n\n\n\nNAME FK";
# print product_image;
names = [];
for i in range(3):
ind = product_image[i].index("alt")+len("alt=\"");
names.append(product_image[i][ind:].split()[0]);
# product_image[i][ind:indend]);
return names;
def link_fk(product_link):
# print "\n\n\n\n\nLINK FK";
# print product_link;
beg_string = "www.flipkart.com";
links = [];
for i in range(3):
ind = product_link[i].index("a href=")+len("a href=\"");
indend = product_link[i].index("class") - 2;
links.append(beg_string+product_link[i][ind:indend]);
return links;
def image_fk(product_image):
img = [];
counter = 0;
for i in range(len(product_image)):
# print product_image[i];
try:
ind = product_image[i].index("data-src")+len("data-src=\"");
ind_end1 = 10000;
ind_end2 = 10000;
try:
ind_end1 = product_image[i].index("\"",ind);
except ValueError:
ind_end2 = product_image[i].index("\'",ind);
if ind_end2 < ind_end1:
ind_end = ind_end2;
else:
ind_end = ind_end1;
img.append(product_image[i][ind:ind_end]);
            counter += 1;
except ValueError:
ind = product_image[i].index("src=")+len("src=\"");
ind_end1 = 10000;
ind_end2 = 10000;
try:
ind_end1 = product_image[i].index("\"",ind);
except ValueError:
ind_end2 = product_image[i].index("\'",ind);
if ind_end2 < ind_end1:
ind_end = ind_end2;
else:
ind_end = ind_end1;
img.append(product_image[i][ind:ind_end]);
            counter += 1;
if counter == 3:
break;
return img[:3];
def process_fk(fk_lines):
product_image = [];
product_name = [];
product_otherone = [];
flag = 0;
counter = 0;
prev_line = "";
linenum = 0;
for l in fk_lines:
# print l;
# if "<div class=\'product" in l:
# flag = 1;
linenum += 1;
if "<div class='product" in l:
product_name.append(l);
flag = 1;
# if flag == 0 and "<img src=" in l:
# flag =1;
# continue;
if flag == 1 and "<img src=" in l:
product_image.append(l);
product_otherone.append(prev_line);
            counter += 1;
if(counter==12):
break;
flag = 0;
prev_line = l;
product_image = product_image[1:11];
product_name = product_name[1:11];
product_otherone = product_otherone[0:10];
if(len(product_name)>=10):
teer = link_fk_actu(product_name);
else:
teer = link_fk(product_otherone);
return image_fk(product_image),teer,name_fk(product_image),price_fk(product_name);
#####################################################################################################
def process_am(am_lines):
# print am_lines;
links = [];
images = [];
names = [];
prices = [];
flag = 0;
counter = 0;
#urllib has a very strange behaviour when retrieving webpages - The server hands out slightly difficult code to parse.
flag = 0;
for l in am_lines:
# print 1;
try:
if ("<div id=\"srProductTitle" in l) and ("<a href=\"" in l) and ("src=\"" in l) and ("<br clear=\"all\" />" in l):
# print l;
# break;
ind =l.index("<a href=\"")+len("<a href=\"");
# print ind;
indend = l.index("\"",ind+1);
links.append(l[ind:indend]);
# i += 1;
ind =l.index("src=\"")+len("src=\"");
indend = l.index("\"",ind);
images.append(l[ind:indend]);
# i+=1;
ind =l.index("<br clear=\"all\" />")+len("<br clear=\"all\" />");
indend = l.index("</a",ind);
names.append(l[ind:indend]);
flag = 1;
# print links,images,names;
# for j in range(10): #generally keep going and stop when you find the necessary key word
# i += 1;
if ("<div class=\"newPrice\">" in l) or ("<div class=\"usedPrice\">" in l):
if flag == 1:
# print flag;
indend =l.index("</span></span></div>");
ind = l.rfind("</span>",0,indend) + len("</span>");
prices.append(l[ind:indend]);
# flag = 1;
counter +=1;
flag = 0;
except ValueError:
continue;
# break;
# if flag ==1:
# break;
if counter == 3:
break;
return images,links,names,prices;
###########################################################################################################
def process_cr(cr_lines):
links = [];
images = [];
names = [];
prices = [];
flag = 0;
counter = 0;
#urllib has a very strange behaviour when retrieving webpages - The server hands out slightly difficult code to parse.
flag = 0;
base = "http://www.cromaretail.com"
for l in cr_lines:
# print l;
try:
if ("<article><a title=" in l) and ("href" in l) and ("<img src=\"" in l):
# print l;
ind =l.index("<article><a title=")+len("<article><a title=\"");
indend = l.index("\"",ind+1);
names.append(l[ind:indend]);
ind =l.index("href=\"")+len("href=\"");
indend = l.index("\"",ind+1);
links.append(base+"/"+l[ind:indend]);
ind =l.index("<img src=\"")+len("<img src=\"");
indend = l.index("\"",ind+1);
images.append(base+l[ind:indend]);
flag =1;
if ("</span>" in l) and flag ==1:
ind =l.index("</span>")+len("</span>");
indend = l.index("<",ind+1);
prices.append(l[ind:indend]);
counter += 1;
flag =0;
except ValueError:
continue;
if counter == 3:
break;
return images,links,names,prices;
#######################################################################################################################
def process_eb(eb_lines):
links = [];
images = [];
names = [];
prices = [];
flag = 0;
counter = 0;
#urllib has a very strange behaviour when retrieving webpages - The server hands out slightly difficult code to parse.
    # use a while loop so the index can actually be advanced past a matched
    # listing (reassigning the variable of a for loop has no effect in Python)
    i = 0;
    while i < len(eb_lines):
        l = eb_lines[i];
        try:
            if (" class=\"lv-1st\"></a>" in l):
                Link = eb_lines[i+12];
                Image = eb_lines[i+14];
                Name = eb_lines[i+14];
                Price = eb_lines[i+45];
#                print Link,Image,Name,Price,"\n\n\n=======================\n\n\n";
                ind =Link.index("<a href=\"")+len("<a href=\"");
                indend = Link.index("\"",ind+1);
                links.append(Link[ind:indend]);
                ind =Image.index("src=")+len("src=\"");
                indend = Image.index("class",ind+1);
                images.append(Image[ind:indend-2]);
                ind =Name.index("alt=")+len("alt=\"");
                indend = Name.index(" />",ind+1);
                names.append(Name[ind:indend-1]);
                ind =Price.index("</b>")+len("</b>");
                indend = Price.index("<",ind+1);
                prices.append(Price[ind:indend]);
                counter += 1;
                i += 50;  # skip past the rest of this listing's markup
        except (ValueError, IndexError):  # also guard the i+45 lookahead near the end
            pass;
        if counter == 3:
            break;
        i += 1;
    return images,links,names,prices;
if __name__=="__main__":
proxy = 'http://10.93.0.37:3333';
search_query = raw_input("Enter the name to compare : ");
search_query = search_query.replace(" ","+");
Flipkart_query = "http://www.flipkart.com/all-categories/pr?p%5B%5D=sort%3Drelevance&sid=search.flipkart.com&q="+search_query;
Amazon_query = "http://www.amazon.in/s/ref=nb_sb_noss_1?url=search-alias%3Daps&field-keywords="+search_query+"&rh=i%3Aaps%2Ck%3A"+search_query;
Croma_query = "http://www.cromaretail.com/productsearch.aspx?txtSearch="+search_query+"&x=-808&y=-85";
Ebay_query = "http://www.ebay.in/sch/i.html?_trksid=p2050601.m570.l1313.TR0.TRC0.X"+search_query+"&_nkw="+search_query+"&_sacat=0&_from=R40";
Flipkart = urllib.urlopen(Flipkart_query, proxies={'http': proxy})
Amazon = urllib.urlopen(Amazon_query, proxies={'http': proxy})
Croma = urllib.urlopen(Croma_query, proxies={'http': proxy})
Ebay = urllib.urlopen(Ebay_query, proxies={'http': proxy})
fk_lines = Flipkart.readlines();
am_lines = Amazon.readlines();
cr_lines = Croma.readlines();
eb_lines = Ebay.readlines();
product_fk = process_fk(fk_lines);
product_am = process_am(am_lines);
product_cr = process_cr(cr_lines);
product_eb = process_eb(eb_lines);
buildwebpage(product_fk,product_cr,product_am,product_eb,search_query);
# Crowd_twitter();
|
SuFizz/Dealer-Loves-Code
|
starter_first.py
|
Python
|
apache-2.0
| 14,255
|
"""
Copyright 2013 Shine Wang
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import urllib
import re
from HTMLParser import HTMLParser
from courseClasses import Course, Lecture, Tutorial, Reserve
class CustomHTMLParser(HTMLParser):
"""this class reads a HTML stream, then parses out the "data" fields"""
def __init__(self, webData):
HTMLParser.__init__(self)
self.webData = webData
def handle_data(self, data):
"""takes out the data"""
self.webData.append(data.strip())
class WebParser:
""""A WebParser is created for each and every course,
to parse the corresponding web page"""
requestURL = "http://www.adm.uwaterloo.ca/cgi-bin/" \
"cgiwrap/infocour/salook.pl"
def __init__(self):
self.webData = []
self.index = -1
self.session = None
self.thisCourse = None
def run(self, courseString, sessionString):
"""this is the method that the main class can call
if successful, returns the Course class
if not, returns an error message"""
self.session = self.parseSession(sessionString)
if self.session is None:
return "SessionNameWrongError"
courseString = map(lambda x: x.upper(), courseString.split())
try:
self.thisCourse = Course(self.session, courseString[0],
courseString[1])
except:
return "CourseNameWrongError"
if self.getWebData(self.thisCourse):
return "WebPageError"
elif self.parseWebData():
return "CourseNotFoundError"
else:
self.processCourseInfo()
self.postProcess(self.thisCourse)
return self.thisCourse
def parseSession(self, sessionString):
try:
ret = "1"
ret += sessionString.split()[1][-2:] # last 2 digits of year
tempMap = (("fall", "9"), ("winter", "1"), ("spring", "5"))
for season in tempMap:
if season[0] in sessionString.lower():
ret += season[1]
return ret
except:
return None
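    # Worked example of the session code built above (format inferred from
    # this function): "Fall 2013" -> "1" + "13" + "9" -> "1139".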
def getWebData(self, course):
"""submits a POST query, initializes HTMLParser"""
try:
params = urllib.urlencode({"sess": course.session,
"subject": course.subject,
"cournum": course.catalogNumber})
page = urllib.urlopen(WebParser.requestURL, params)
parser = CustomHTMLParser(self.webData)
            # we use .replace() because HTMLParser ignores "&nbsp;",
            # which would screw up our table
            parser.feed(page.read().replace("&nbsp;", " "))
except:
return "WebPageError"
def parseWebData(self):
"""We try to find the beginning of the desired table"""
# now, we find the start index and pass that on along
# with the webData
for i in xrange(len(self.webData)-3):
if self.webData[i] == self.thisCourse.subject \
and self.webData[i+2] == self.thisCourse.catalogNumber:
self.index = i
break
if self.index == -1: # website not found
return "CourseNotFound"
def processCourseInfo(self):
"""now, we do the heavy-duty processing of the data table"""
# sets basic attrs of thisCourse
self.thisCourse.units = self.webData[self.index+4]
self.thisCourse.title = self.webData[self.index+6]
while self.webData[self.index] != "Instructor":
self.index += 1
# processing row-by-row
while not self.endOfRow(self.webData[self.index]):
if self.webData[self.index] != "":
self.processSlot()
self.index += 1
if self.index == len(self.webData):
return
def processSlot(self):
"""we check to see if this is the BEGINNING of a valid row"""
if (self.webData[self.index+1][:3].upper() == "LEC"
or self.webData[self.index+1][:3].upper() == "LAB") \
and "ONLINE" not in self.webData[self.index+2]:
# we don't want online classes!
# processing a lecture row
lec = Lecture()
if self.processClass(lec, self.index, self.webData):
return
self.thisCourse.lectures.append(lec)
elif self.webData[self.index+1][:3].upper() == "TUT":
# processing a tutorial row
tut = Tutorial()
if self.processClass(tut, self.index, self.webData):
return
self.thisCourse.tutorials.append(tut)
elif self.webData[self.index][:7].upper() == "RESERVE":
# processing a reserve row
res = Reserve()
self.processReserve(res, self.index, self.webData)
if self.thisCourse.lectures:
self.thisCourse.lectures[-1].reserves.append(res)
# note: we leave out the TST (exam?) times for now
def processReserve(self, res, index, webData):
"""processing reservations for certain types of students"""
res.name = webData[index][9:]
# we remove the "only" suffix (which is annoyingly pointless)
if "only" in res.name:
res.name = res.name[:-5]
# also, the "students" suffx
if "students" in res.name or "Students" in res.name:
res.name = res.name[:-9]
        # skip ahead to the first numeric cell
        while not webData[index].isdigit():
            index += 1
        # retrieving enrollment numbers
res.enrlCap = int(webData[index])
res.enrlTotal = int(webData[index+1])
def processClass(self, lec, index, webData):
"""we process a typical lecture or tutorial row"""
attr1 = ["classNumber", "compSec", "campusLocation"]
for i in xrange(len(attr1)):
setattr(lec, attr1[i], webData[index+i].strip())
index += 6
attr2 = ["enrlCap", "enrlTotal", "waitCap", "waitTotal"]
for i in xrange(len(attr2)):
setattr(lec, attr2[i], int(webData[index+i]))
index += 4
# parsing the "Times Days/Date" field
match = re.search(r"([:\d]+)-([:\d]+)(\w+)", webData[index])
if not match:
# we return an error message in the "TBA" case
return "NoTimeError"
attr3 = ["startTime", "endTime", "days"]
for i in xrange(len(attr3)):
setattr(lec, attr3[i], match.group(i+1).strip())
index += 1
if len(webData[index].split()) == 2:
            # sometimes no building, room, or instructor is given;
            # this is mostly the case for Laurier courses
lec.building, lec.room = webData[index].split()
lec.instructor = webData[index+1].strip()
def endOfRow(self, data):
"""returns true if the current data-cell is the last cell
of this course; else - false"""
# the last cell is of the form: ##/##-##/## or
# "Information last updated
if re.search(r"\d+/\d+-\d+/\d+", data) or \
"Information last updated" in data:
return True
else:
return False
def postProcess(self, course):
"""this function will convert the class times to minutes-past-
the-previous-midnight, and converts the days to numbers.
Also, some reservation-postprocessing"""
map(lambda x: x.calcMiscSeats(), course.lectures)
for lec in course.lectures:
lec.courseID = course.subject + " " + course.catalogNumber
for tut in course.tutorials:
tut.courseID = course.subject + " " + course.catalogNumber
for slot in course.lectures + course.tutorials:
# first, we convert time to 24hr time
# earliest start time for a class is 8:30am
# night classes start at/before 7:00pm
if 1 <= int(slot.startTime.split(":")[0]) <= 7:
slot.startTime, slot.endTime = \
map(lambda x: "{}:{}".format(str(int(x.split(":")[0])
+ 12), x[-2:]), [slot.startTime,
slot.endTime])
elif int(slot.startTime.split(":")[0]) > int(
slot.endTime.split(":")[0]):
# e.g. 12:00 to 1:00
slot.endTime = "{}:{}".format(str(int(
slot.endTime.split(":")[0])+12), slot.endTime[-2:])
# now, we write to slot.sTime, slot.eTime
# (minutes-past-midnight...)
slot.sTime, slot.eTime = map(lambda x: int(x[:2]) * 60 +
int(x[-2:]),
[slot.startTime, slot.endTime])
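            # e.g. (hypothetical values): a 1:00-2:20 class becomes
            # startTime "13:00", endTime "14:20", so sTime = 780 and
            # eTime = 860 minutes past midnight.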
# we write to slot.ndays, where ndays is a string of numbers,
# 0->4
if "M" in slot.days:
slot.ndays += "0"
i = slot.days.find("T")
if i != -1 and (i == len(slot.days) - 1 or
slot.days[i+1] != 'h'):
# basically, if not Th (for Thursday)
slot.ndays += "1"
# now, for the rest of the days...
for i in [("W", "2"), ("Th", "3"), ("F", "4")]:
if i[0] in slot.days:
slot.ndays += i[1]
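            # e.g. days "MWF" yields ndays "024"; "TTh" yields "13".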
# we make a small adjustment to campusLocation,
# removing whitespace
slot.campusLocation = slot.campusLocation.split()[0]
# we make the prof name "first last" instead of
# "last,first middle"
if slot.instructor != "":
s = slot.instructor.split(" ")
for i in s:
if "," in i:
# we want the 2 words connected by the ","
slot.instructor = " ".join(reversed(list(
i.split(","))))
|
shinexwang/Classy
|
Main/webParser.py
|
Python
|
apache-2.0
| 10,545
|
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Top-level presubmit script for V8.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
import sys
def _V8PresubmitChecks(input_api, output_api):
"""Runs the V8 presubmit checks."""
import sys
sys.path.append(input_api.os_path.join(
input_api.PresubmitLocalPath(), 'tools'))
from presubmit import CppLintProcessor
from presubmit import SourceProcessor
from presubmit import CheckGeneratedRuntimeTests
results = []
if not CppLintProcessor().Run(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError("C++ lint check failed"))
if not SourceProcessor().Run(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError(
"Copyright header, trailing whitespaces and two empty lines " \
"between declarations check failed"))
if not CheckGeneratedRuntimeTests(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError(
"Generated runtime tests check failed"))
return results
def _CheckUnwantedDependencies(input_api, output_api):
"""Runs checkdeps on #include statements added in this
  change. Breaking "-" rules is an error; breaking "!" rules is a
  warning.
"""
# We need to wait until we have an input_api object and use this
# roundabout construct to import checkdeps because this file is
# eval-ed and thus doesn't have __file__.
original_sys_path = sys.path
try:
sys.path = sys.path + [input_api.os_path.join(
input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
import checkdeps
from cpp_checker import CppChecker
from rules import Rule
finally:
# Restore sys.path to what it was before.
sys.path = original_sys_path
added_includes = []
for f in input_api.AffectedFiles():
if not CppChecker.IsCppFile(f.LocalPath()):
continue
changed_lines = [line for line_num, line in f.ChangedContents()]
added_includes.append([f.LocalPath(), changed_lines])
deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
error_descriptions = []
warning_descriptions = []
for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
added_includes):
description_with_path = '%s\n %s' % (path, rule_description)
if rule_type == Rule.DISALLOW:
error_descriptions.append(description_with_path)
else:
warning_descriptions.append(description_with_path)
results = []
if error_descriptions:
results.append(output_api.PresubmitError(
'You added one or more #includes that violate checkdeps rules.',
error_descriptions))
if warning_descriptions:
results.append(output_api.PresubmitPromptOrNotify(
'You added one or more #includes of files that are temporarily\n'
'allowed but being removed. Can you avoid introducing the\n'
'#include? See relevant DEPS file(s) for details and contacts.',
warning_descriptions))
return results
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
results.extend(input_api.canned_checks.CheckOwners(
input_api, output_api, source_file_filter=None))
results.extend(_V8PresubmitChecks(input_api, output_api))
results.extend(_CheckUnwantedDependencies(input_api, output_api))
return results
def _SkipTreeCheck(input_api, output_api):
"""Check the env var whether we want to skip tree check.
Only skip if src/version.cc has been updated."""
src_version = 'src/version.cc'
FilterFile = lambda file: file.LocalPath() == src_version
  if not input_api.AffectedSourceFiles(FilterFile):
return False
return input_api.environ.get('PRESUBMIT_TREE_CHECK') == 'skip'
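# Sketch of how the skip flag is supplied (hypothetical shell invocation):
#   PRESUBMIT_TREE_CHECK=skip git cl presubmit
# The flag is only honored when src/version.cc is among the affected files.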
def _CheckChangeLogFlag(input_api, output_api):
"""Checks usage of LOG= flag in the commit message."""
results = []
if input_api.change.BUG and not 'LOG' in input_api.change.tags:
results.append(output_api.PresubmitError(
'An issue reference (BUG=) requires a change log flag (LOG=). '
'Use LOG=Y for including this commit message in the change log. '
'Use LOG=N or leave blank otherwise.'))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
results.extend(_CheckChangeLogFlag(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
results.extend(_CheckChangeLogFlag(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
if not _SkipTreeCheck(input_api, output_api):
results.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
json_url='http://v8-status.appspot.com/current?format=json'))
return results
def GetPreferredTryMasters(project, change):
return {
'tryserver.v8': {
'v8_linux_rel': set(['defaulttests']),
'v8_linux_dbg': set(['defaulttests']),
'v8_linux_nosnap_rel': set(['defaulttests']),
'v8_linux_nosnap_dbg': set(['defaulttests']),
'v8_linux64_rel': set(['defaulttests']),
'v8_linux_arm_dbg': set(['defaulttests']),
'v8_linux_arm64_rel': set(['defaulttests']),
'v8_linux_layout_dbg': set(['defaulttests']),
'v8_mac_rel': set(['defaulttests']),
'v8_win_rel': set(['defaulttests']),
},
}
|
nextsmsversion/macchina.io
|
platform/JS/V8/v8-3.28.4/PRESUBMIT.py
|
Python
|
apache-2.0
| 7,096
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Converts the serialized examples to TFRecords for putting into a model."""
# TODO(alanesuhr): Factor out what should be in a lib and what should be in a
# binary.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import os
import random
from absl import app
from absl import flags
import apache_beam as beam
from language.xsp.data_preprocessing.nl_to_sql_example import NLToSQLExample
from language.xsp.model.model_config import load_config
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('examples_dir', '',
'The directory containing the examples.')
flags.DEFINE_list('filenames', None,
'The list of files to process containing NLToSQLExamples.')
flags.DEFINE_string('config', '', 'The path to a model config file.')
flags.DEFINE_string('tf_examples_dir', '',
'The location to put the Tensorflow examples.')
flags.DEFINE_string('output_vocab', '',
'The location of the output vocabulary.')
flags.DEFINE_bool('permute', False, 'Whether to permute the train schemas.')
flags.DEFINE_bool('generate_output', False,
'Whether to generate output sequences.')
flags.DEFINE_integer(
'num_spider_repeats', 7,
'The number of times to permute the Spider data tables (for train only).')
BEG_TOK = '[CLS]'
SEP_TOK = '[SEP]'
TAB_TOK = '[TAB]'
UNK_TOK = '[UNK]'
GENERATE_TYPE = 1
COPY_TYPE = 2
COL_TYPE_TO_TOK = {
'text': '[STR_COL]',
'number': '[NUM_COL]',
'others': '[OTH_COL]',
'time': '[TIME_COL]',
'boolean': '[BOOL_COL]',
}
class InputToken(
collections.namedtuple('InputToken', [
'wordpiece', 'index', 'copy_mask', 'segment_id',
'indicates_foreign_key', 'aligned'
])):
pass
class OutputAction(
collections.namedtuple('OutputAction', ['wordpiece', 'action_id', 'type'])):
pass
def add_context(key):
"""Adds context features required by the model."""
features = dict()
features['language'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[b'en']))
features['region'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[b'US_eng']))
features['type'] = tf.train.Feature(int64_list=tf.train.Int64List(value=[1]))
features['weight'] = tf.train.Feature(
float_list=tf.train.FloatList(value=[1.0]))
features['tag'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[b'all']))
features['key'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[key.encode('utf8')]))
return features
class ConvertToSequenceExampleDoFn(beam.DoFn):
"""DoFn for converting from NLToSQLExample to a TFRecord."""
def __init__(self, model_config, generate_output, permute, num_repeats,
*unused_args, **unused_kwargs):
self.model_config = model_config
self.input_vocabulary = None
self.output_vocabulary = None
self.permute = permute
self.num_repeats = num_repeats
if not self.permute and self.num_repeats > 1:
raise ValueError('Not permuting but num_repeats = ' +
str(self.num_repeats))
# This cache maps from a proto representing a schema to its string
# equivalent
# (NOTE: this assumes there's no randomness in the order of the tables,
# cols, etc.)
self.table_cache = dict()
self.generate_output = generate_output
def non_parallel_process(self, example):
# Load cache
if not self.input_vocabulary:
with tf.gfile.Open(
self.model_config.data_options.bert_vocab_path) as infile:
self.input_vocabulary = [
line.rstrip('\n') for line in infile.readlines()
]
if not self.output_vocabulary:
with tf.gfile.Open(FLAGS.output_vocab) as infile:
self.output_vocabulary = [
line.replace('\n', '', 1) for line in infile.readlines()
]
results = list()
for _ in range(self.num_repeats):
# Convert the input to an indexed sequence
input_conversion = self._convert_input_to_indexed_sequence(
example.model_input, random_permutation=self.permute)
if input_conversion is None:
return None
      # input_tokens stores each raw wordpiece, its index in the
      # vocabulary, and whether it is copiable
# The maps store tuples of table or column entities paired with their head
# index in input_tokens
input_tokens, table_index_map, column_index_map, base_idx = input_conversion
# Convert the output to an indexed sequence
output_actions = list()
if self.generate_output:
output_actions = self._convert_output_to_indexed_sequence(
example, table_index_map, column_index_map, base_idx)
if output_actions is None:
return None
raw_input_wordpieces = [
input_token.wordpiece for input_token in input_tokens
]
for action in output_actions:
if action.type == COPY_TYPE:
# Copy actions should only either
# 1. Copy from the input (i.e., before SEP)
# 2. Copy TAB or COL tokens
assert input_tokens[
action.action_id].index == self.input_vocabulary.index(
TAB_TOK) or input_tokens[action.action_id].index in [
self.input_vocabulary.index(col_tok)
for col_tok in COL_TYPE_TO_TOK.values()
] or action.action_id < raw_input_wordpieces.index(
SEP_TOK
), 'Unexpected copying action: %r with proto:\n%r' % (
input_tokens[action.action_id], example)
assert input_tokens[action.action_id].copy_mask == 1, (
'Copied, but copy mask is 0: %s at '
'index %d; copied action was %s') % (
input_tokens[action.action_id], action.action_id, action)
# Actually create the TF Example
results.append(
self._convert_to_sequence_example(
input_tokens, output_actions,
example.model_input.original_utterance).SerializeToString())
return results
def process(self, example):
results = self.non_parallel_process(example)
if results is not None:
for result in results:
yield result
def _convert_input_to_sequence_example(self, input_tokens, features):
features['source_wordpieces'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[input_token.index]))
for input_token in input_tokens
])
features['copiable_input'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[input_token.copy_mask]))
for input_token in input_tokens
])
copy_features = list()
foreign_key_features = list()
for input_token in input_tokens:
copy_features.append(
tf.train.Feature(
bytes_list=tf.train.BytesList(
value=[input_token.wordpiece.encode('utf8')])))
foreign_key_features.append(
tf.train.Feature(
int64_list=tf.train.Int64List(
value=[input_token.indicates_foreign_key])))
features['copy_strings'] = tf.train.FeatureList(feature=copy_features)
features['segment_ids'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[input_token.segment_id]))
for input_token in input_tokens
])
features['indicates_foreign_key'] = tf.train.FeatureList(
feature=foreign_key_features)
features['utterance_schema_alignment'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[input_token.aligned]))
for input_token in input_tokens
])
def _convert_output_to_sequence_example(self, output_actions, features):
features['target_action_ids'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[action.action_id]))
for action in output_actions
])
features['target_action_types'] = tf.train.FeatureList(feature=[
tf.train.Feature(int64_list=tf.train.Int64List(value=[action.type]))
for action in output_actions
])
def _convert_to_sequence_example(self, input_tokens, output_actions,
utterance):
features = collections.OrderedDict()
self._convert_input_to_sequence_example(input_tokens, features)
self._convert_output_to_sequence_example(output_actions, features)
context_features = add_context(utterance)
return tf.train.SequenceExample(
context=tf.train.Features(feature=context_features),
feature_lists=tf.train.FeatureLists(feature_list=features))
def _get_vocab_index_or_unk(self, token, is_input=True):
    # Note that this will emit a 'Unicode equals' warning if the token is a
    # unicode-only token
if is_input:
if token in self.input_vocabulary:
return self.input_vocabulary.index(token)
return self.input_vocabulary.index(UNK_TOK)
if token in self.output_vocabulary:
# Add 3 to this because there are 3 placeholder tokens in the output
# vocabulary that will be used during train (PAD, BEG, and END).
return self.output_vocabulary.index(token) + 3
print('Could not find token ' + token.encode('ascii', 'ignore') +
' in output vocabulary.')
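  # Worked example (hypothetical vocabularies): with output_vocabulary
  # ['select', 'from'], _get_vocab_index_or_unk('from', is_input=False)
  # returns 1 + 3 = 4, since ids 0-2 are reserved for the PAD/BEG/END
  # placeholders. An unknown output token falls through and implicitly
  # returns None, which callers detect and treat as a conversion failure.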
def _convert_input_to_indexed_sequence(self, model_input, random_permutation):
# Everything is tokenized, but need to combine the utterance with the
# schema.
converted_wordpiece_tokens = list()
for wordpiece in model_input.utterance_wordpieces:
converted_wordpiece_tokens.append(
InputToken(('##' if '##' in wordpiece.wordpiece else '') +
model_input.original_utterance[
wordpiece.span_start_index:wordpiece.span_end_index],
self._get_vocab_index_or_unk(wordpiece.wordpiece), 1, 0, 0,
int(wordpiece.matches_to_schema)))
tokens = [
InputToken(BEG_TOK, self.input_vocabulary.index(BEG_TOK), 0, 0, 0, 0)
] + converted_wordpiece_tokens + [
InputToken(SEP_TOK, self.input_vocabulary.index(SEP_TOK), 0, 0, 0, 0)
]
table_index_map = list()
column_index_map = list()
# Add the table tokens
# Look it up in the cache
string_serial = ','.join([str(table) for table in model_input.tables])
if string_serial in self.table_cache and not random_permutation:
tokens_suffix, table_index_map, column_index_map = self.table_cache[
string_serial]
else:
# The input tokens contain the string to copy, rather than the wordpiece
# that's being embedded.
tokens_suffix = list()
order = list(range(len(model_input.tables)))
if random_permutation:
random.shuffle(order)
for table_segment_idx, table_idx in enumerate(order):
table = model_input.tables[table_idx]
table_index_map.append((len(tokens_suffix), table))
table_wordpieces_tokens = list()
for wordpiece in table.table_name_wordpieces:
table_wordpieces_tokens.append(
InputToken('', self._get_vocab_index_or_unk(wordpiece.wordpiece),
0, table_segment_idx + 1, 0,
int(table.matches_to_utterance)))
tokens_suffix.extend([
InputToken(
table.original_table_name, self.input_vocabulary.index(TAB_TOK),
1, table_segment_idx + 1, 0, int(table.matches_to_utterance))
] + table_wordpieces_tokens)
col_order = list(range(len(table.table_columns)))
if random_permutation:
random.shuffle(col_order)
# Add the column tokens for this table
for col_idx in col_order:
column = table.table_columns[col_idx]
column_index_map.append((len(tokens_suffix), column))
column_wordpiece_tokens = list()
for wordpiece in column.column_name_wordpieces:
column_wordpiece_tokens.append(
InputToken('',
self._get_vocab_index_or_unk(wordpiece.wordpiece), 0,
table_segment_idx + 1, int(column.is_foreign_key),
int(column.matches_to_utterance)))
tokens_suffix.extend([
InputToken(
column.original_column_name,
self.input_vocabulary.index(COL_TYPE_TO_TOK[
column.column_type]), 1, table_segment_idx + 1,
int(column.is_foreign_key), int(column.matches_to_utterance))
] + column_wordpiece_tokens)
# Update cache
if not random_permutation:
self.table_cache[string_serial] = (tokens_suffix, table_index_map,
column_index_map)
base_idx = len(tokens)
tokens.extend(tokens_suffix)
# If there are too many tokens, return None.
if len(tokens) > self.model_config.data_options.max_num_tokens:
return None
return tokens, table_index_map, column_index_map, base_idx
def _convert_output_to_indexed_sequence(self, example, table_index_map,
column_index_map, base_idx):
action_sequence = list()
gold_query = example.gold_sql_query
if len(
gold_query.actions) > self.model_config.data_options.max_decode_length:
return None
for action in gold_query.actions:
if action.symbol:
action_sequence.append(
OutputAction(action.symbol,
self._get_vocab_index_or_unk(action.symbol, False),
GENERATE_TYPE))
elif action.entity_copy:
found = False
if action.entity_copy.copied_table:
# Copied a table.
table = action.entity_copy.copied_table
for index, entity in table_index_map:
if entity.original_table_name == table.original_table_name:
action_sequence.append(
OutputAction(table.original_table_name, index + base_idx,
COPY_TYPE))
found = True
break
else:
# Copied a column.
column = action.entity_copy.copied_column
for index, entity in column_index_map:
if entity.original_column_name == column.original_column_name and entity.table_name == column.table_name:
action_sequence.append(
OutputAction(column.original_column_name, index + base_idx,
COPY_TYPE))
found = True
break
if not found:
return None
elif action.utterance_copy:
copy_wordpiece = action.utterance_copy
action_sequence.append(
OutputAction(copy_wordpiece.wordpiece,
copy_wordpiece.tokenized_index + 1, COPY_TYPE))
if None in [action.action_id for action in action_sequence]:
return None
return action_sequence
def creation_wrapper(process_dataset_fn):
"""Wrapper for creating the TFRecords files."""
# Create the tf examples directory.
if not tf.gfile.IsDirectory(FLAGS.tf_examples_dir):
print('Creating TFExamples directory at ' + FLAGS.tf_examples_dir)
tf.gfile.MkDir(FLAGS.tf_examples_dir)
# Get the model config.
model_config = load_config(FLAGS.config)
for filename in FLAGS.filenames:
if not filename:
continue
input_path = os.path.join(FLAGS.examples_dir, filename)
output_path = os.path.join(
FLAGS.tf_examples_dir,
filename.split('/')[-1].split('.')[0] + '.tfrecords')
permute = 'spider_train' in output_path and FLAGS.permute
num_repeats = FLAGS.num_spider_repeats if permute else 1
print('Processing %s. Permute: %r with %d repetitions' %
(filename, permute, num_repeats))
print('Writing to ' + output_path)
process_dataset_fn(input_path, model_config, permute, num_repeats,
output_path)
def process_dataset(input_path, model_config, permute, num_repeats,
output_path):
"""Function that processes a dataset without multiprocessing."""
fn = ConvertToSequenceExampleDoFn(
model_config,
FLAGS.generate_output,
permute=permute,
num_repeats=num_repeats)
with tf.gfile.Open(input_path) as infile:
examples = [NLToSQLExample().from_json(json.loads(line)) for line in infile]
with tf.python_io.TFRecordWriter(output_path) as writer:
num_examples_written = 0
total_examples = 0
for example in examples:
total_examples += 1
converteds = fn.non_parallel_process(example)
if converteds:
num_examples_written += 1
for converted in converteds:
writer.write(converted)
print('Wrote to %d / %d to %s' %
(num_examples_written, total_examples, output_path))
def main(unused_argv):
creation_wrapper(process_dataset)
if __name__ == '__main__':
app.run(main)
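# Example invocation (hypothetical paths and filenames):
#   python convert_to_tfrecords.py \
#     --examples_dir=/tmp/examples --filenames=spider_train.jsonl \
#     --config=model_config.json --tf_examples_dir=/tmp/tf_examples \
#     --output_vocab=output_vocab.txt --generate_output --permute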
|
google-research/language
|
language/xsp/data_preprocessing/convert_to_tfrecords.py
|
Python
|
apache-2.0
| 17,885
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from polyaxon import settings
from polyaxon.proxies.schemas.base import clean_config
from polyaxon.proxies.schemas.buffering import get_buffering_config
from polyaxon.proxies.schemas.charset import get_charset_config
from polyaxon.proxies.schemas.error_page import get_error_page_config
from polyaxon.proxies.schemas.gzip import get_gzip_config
from polyaxon.proxies.schemas.listen import get_listen_config
from polyaxon.proxies.schemas.locations import get_streams_locations_config
from polyaxon.proxies.schemas.logging import get_logging_config
from polyaxon.proxies.schemas.streams.gunicorn import (
get_gunicorn_config,
get_k8s_auth_config,
)
from polyaxon.proxies.schemas.streams.k8s import get_k8s_root_location_config
from polyaxon.proxies.schemas.timeout import get_timeout_config
def get_base_config():
config = [
get_listen_config(
is_proxy=False, port=settings.PROXIES_CONFIG.streams_target_port
)
]
config += [
get_logging_config(),
get_gzip_config(),
get_charset_config(),
get_buffering_config(),
get_timeout_config(),
get_gunicorn_config(),
get_k8s_auth_config(),
get_error_page_config(),
get_streams_locations_config(),
get_k8s_root_location_config(),
]
return clean_config(config)
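# Usage sketch (assuming settings.PROXIES_CONFIG has been initialized):
#   print(get_base_config())
# This should render the assembled nginx config for the streams service.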
|
polyaxon/polyaxon
|
core/polyaxon/proxies/schemas/streams/base.py
|
Python
|
apache-2.0
| 1,940
|
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import netaddr
from neutron_lib import exceptions
from oslo_log import log as logging
from oslo_policy import policy as oslo_policy
from oslo_utils import excutils
import six
import webob.exc
from neutron._i18n import _, _LE, _LI
from neutron.api import api_common
from neutron.api.v2 import attributes
from neutron.api.v2 import resource as wsgi_resource
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.common import constants as n_const
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.db import api as db_api
from neutron import policy
from neutron import quota
from neutron.quota import resource_registry
LOG = logging.getLogger(__name__)
FAULT_MAP = {exceptions.NotFound: webob.exc.HTTPNotFound,
exceptions.Conflict: webob.exc.HTTPConflict,
exceptions.InUse: webob.exc.HTTPConflict,
exceptions.BadRequest: webob.exc.HTTPBadRequest,
exceptions.ServiceUnavailable: webob.exc.HTTPServiceUnavailable,
exceptions.NotAuthorized: webob.exc.HTTPForbidden,
netaddr.AddrFormatError: webob.exc.HTTPBadRequest,
oslo_policy.PolicyNotAuthorized: webob.exc.HTTPForbidden
}
class Controller(object):
LIST = 'list'
SHOW = 'show'
CREATE = 'create'
UPDATE = 'update'
DELETE = 'delete'
@property
def plugin(self):
return self._plugin
@property
def resource(self):
return self._resource
@property
def attr_info(self):
return self._attr_info
@property
def member_actions(self):
return self._member_actions
def __init__(self, plugin, collection, resource, attr_info,
allow_bulk=False, member_actions=None, parent=None,
allow_pagination=False, allow_sorting=False):
if member_actions is None:
member_actions = []
self._plugin = plugin
self._collection = collection.replace('-', '_')
self._resource = resource.replace('-', '_')
self._attr_info = attr_info
self._allow_bulk = allow_bulk
self._allow_pagination = allow_pagination
self._allow_sorting = allow_sorting
self._native_bulk = self._is_native_bulk_supported()
self._native_pagination = self._is_native_pagination_supported()
self._native_sorting = self._is_native_sorting_supported()
self._policy_attrs = [name for (name, info) in self._attr_info.items()
if info.get('required_by_policy')]
self._notifier = n_rpc.get_notifier('network')
self._member_actions = member_actions
self._primary_key = self._get_primary_key()
if self._allow_pagination and self._native_pagination:
# Native pagination need native sorting support
if not self._native_sorting:
raise exceptions.Invalid(
_("Native pagination depend on native sorting")
)
if not self._allow_sorting:
LOG.info(_LI("Allow sorting is enabled because native "
"pagination requires native sorting"))
self._allow_sorting = True
self.parent = parent
if parent:
self._parent_id_name = '%s_id' % parent['member_name']
parent_part = '_%s' % parent['member_name']
else:
self._parent_id_name = None
parent_part = ''
self._plugin_handlers = {
self.LIST: 'get%s_%s' % (parent_part, self._collection),
self.SHOW: 'get%s_%s' % (parent_part, self._resource)
}
for action in [self.CREATE, self.UPDATE, self.DELETE]:
self._plugin_handlers[action] = '%s%s_%s' % (action, parent_part,
self._resource)
def _get_primary_key(self, default_primary_key='id'):
for key, value in six.iteritems(self._attr_info):
if value.get('primary_key', False):
return key
return default_primary_key
def _is_native_bulk_supported(self):
native_bulk_attr_name = ("_%s__native_bulk_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_bulk_attr_name, False)
def _is_native_pagination_supported(self):
return api_common.is_native_pagination_supported(self._plugin)
def _is_native_sorting_supported(self):
return api_common.is_native_sorting_supported(self._plugin)
def _exclude_attributes_by_policy(self, context, data):
"""Identifies attributes to exclude according to authZ policies.
Return a list of attribute names which should be stripped from the
response returned to the user because the user is not authorized
to see them.
"""
attributes_to_exclude = []
for attr_name in data.keys():
attr_data = self._attr_info.get(attr_name)
if attr_data and attr_data['is_visible']:
if policy.check(
context,
'%s:%s' % (self._plugin_handlers[self.SHOW], attr_name),
data,
might_not_exist=True,
pluralized=self._collection):
# this attribute is visible, check next one
continue
# if the code reaches this point then either the policy check
# failed or the attribute was not visible in the first place
attributes_to_exclude.append(attr_name)
return attributes_to_exclude
def _view(self, context, data, fields_to_strip=None):
"""Build a view of an API resource.
:param context: the neutron context
:param data: the object for which a view is being created
:param fields_to_strip: attributes to remove from the view
:returns: a view of the object which includes only attributes
visible according to API resource declaration and authZ policies.
"""
fields_to_strip = ((fields_to_strip or []) +
self._exclude_attributes_by_policy(context, data))
return self._filter_attributes(context, data, fields_to_strip)
def _filter_attributes(self, context, data, fields_to_strip=None):
if not fields_to_strip:
return data
return dict(item for item in six.iteritems(data)
if (item[0] not in fields_to_strip))
def _do_field_list(self, original_fields):
fields_to_add = None
# don't do anything if fields were not specified in the request
if original_fields:
fields_to_add = [attr for attr in self._policy_attrs
if attr not in original_fields]
original_fields.extend(self._policy_attrs)
return original_fields, fields_to_add
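    # Worked example (hypothetical fields): with _policy_attrs
    # ['tenant_id'] and a request for ['name'], this returns
    # (['name', 'tenant_id'], ['tenant_id']); the added attribute is
    # stripped from the response after the policy checks run.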
def __getattr__(self, name):
if name in self._member_actions:
@db_api.retry_db_errors
def _handle_action(request, id, **kwargs):
arg_list = [request.context, id]
# Ensure policy engine is initialized
policy.init()
# Fetch the resource and verify if the user can access it
try:
parent_id = kwargs.get(self._parent_id_name)
resource = self._item(request,
id,
do_authz=True,
field_list=None,
parent_id=parent_id)
except oslo_policy.PolicyNotAuthorized:
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
body = kwargs.pop('body', None)
# Explicit comparison with None to distinguish from {}
if body is not None:
arg_list.append(body)
# It is ok to raise a 403 because accessibility to the
# object was checked earlier in this method
policy.enforce(request.context,
name,
resource,
pluralized=self._collection)
ret_value = getattr(self._plugin, name)(*arg_list, **kwargs)
# It is simply impossible to predict whether one of this
# actions alters resource usage. For instance a tenant port
# is created when a router interface is added. Therefore it is
# important to mark as dirty resources whose counters have
# been altered by this operation
resource_registry.set_resources_dirty(request.context)
return ret_value
return _handle_action
else:
raise AttributeError()
def _get_pagination_helper(self, request):
if self._allow_pagination and self._native_pagination:
return api_common.PaginationNativeHelper(request,
self._primary_key)
elif self._allow_pagination:
return api_common.PaginationEmulatedHelper(request,
self._primary_key)
return api_common.NoPaginationHelper(request, self._primary_key)
def _get_sorting_helper(self, request):
if self._allow_sorting and self._native_sorting:
return api_common.SortingNativeHelper(request, self._attr_info)
elif self._allow_sorting:
return api_common.SortingEmulatedHelper(request, self._attr_info)
return api_common.NoSortingHelper(request, self._attr_info)
def _items(self, request, do_authz=False, parent_id=None):
"""Retrieves and formats a list of elements of the requested entity."""
# NOTE(salvatore-orlando): The following ensures that fields which
# are needed for authZ policy validation are not stripped away by the
# plugin before returning.
original_fields, fields_to_add = self._do_field_list(
api_common.list_args(request, 'fields'))
filters = api_common.get_filters(request, self._attr_info,
['fields', 'sort_key', 'sort_dir',
'limit', 'marker', 'page_reverse'])
kwargs = {'filters': filters,
'fields': original_fields}
sorting_helper = self._get_sorting_helper(request)
pagination_helper = self._get_pagination_helper(request)
sorting_helper.update_args(kwargs)
sorting_helper.update_fields(original_fields, fields_to_add)
pagination_helper.update_args(kwargs)
pagination_helper.update_fields(original_fields, fields_to_add)
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, self._plugin_handlers[self.LIST])
obj_list = obj_getter(request.context, **kwargs)
obj_list = sorting_helper.sort(obj_list)
obj_list = pagination_helper.paginate(obj_list)
# Check authz
if do_authz:
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
# Omit items from list that should not be visible
obj_list = [obj for obj in obj_list
if policy.check(request.context,
self._plugin_handlers[self.SHOW],
obj,
plugin=self._plugin,
pluralized=self._collection)]
# Use the first element in the list for discriminating which attributes
# should be filtered out because of authZ policies
# fields_to_add contains a list of attributes added for request policy
# checks but that were not required by the user. They should be
# therefore stripped
fields_to_strip = fields_to_add or []
if obj_list:
fields_to_strip += self._exclude_attributes_by_policy(
request.context, obj_list[0])
collection = {self._collection:
[self._filter_attributes(
request.context, obj,
fields_to_strip=fields_to_strip)
for obj in obj_list]}
pagination_links = pagination_helper.get_links(obj_list)
if pagination_links:
collection[self._collection + "_links"] = pagination_links
# Synchronize usage trackers, if needed
resource_registry.resync_resource(
request.context, self._resource, request.context.tenant_id)
return collection
def _item(self, request, id, do_authz=False, field_list=None,
parent_id=None):
"""Retrieves and formats a single element of the requested entity."""
kwargs = {'fields': field_list}
action = self._plugin_handlers[self.SHOW]
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, action)
obj = obj_getter(request.context, id, **kwargs)
# Check authz
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
if do_authz:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
return obj
@db_api.retry_db_errors
def index(self, request, **kwargs):
"""Returns a list of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return self._items(request, True, parent_id)
@db_api.retry_db_errors
def show(self, request, id, **kwargs):
"""Returns detailed information about the requested entity."""
try:
# NOTE(salvatore-orlando): The following ensures that fields
# which are needed for authZ policy validation are not stripped
# away by the plugin before returning.
field_list, added_fields = self._do_field_list(
api_common.list_args(request, "fields"))
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return {self._resource:
self._view(request.context,
self._item(request,
id,
do_authz=True,
field_list=field_list,
parent_id=parent_id),
fields_to_strip=added_fields)}
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
def _emulate_bulk_create(self, obj_creator, request, body, parent_id=None):
objs = []
try:
for item in body[self._collection]:
kwargs = {self._resource: item}
if parent_id:
kwargs[self._parent_id_name] = parent_id
fields_to_strip = self._exclude_attributes_by_policy(
request.context, item)
objs.append(self._filter_attributes(
request.context,
obj_creator(request.context, **kwargs),
fields_to_strip=fields_to_strip))
return objs
# Note(salvatore-orlando): broad catch as in theory a plugin
# could raise any kind of exception
except Exception:
with excutils.save_and_reraise_exception():
for obj in objs:
obj_deleter = getattr(self._plugin,
self._plugin_handlers[self.DELETE])
try:
kwargs = ({self._parent_id_name: parent_id}
if parent_id else {})
obj_deleter(request.context, obj['id'], **kwargs)
except Exception:
# broad catch as our only purpose is to log the
# exception
LOG.exception(_LE("Unable to undo add for "
"%(resource)s %(id)s"),
{'resource': self._resource,
'id': obj['id']})
# TODO(salvatore-orlando): The object being processed when the
# plugin raised might have been created or not in the db.
# We need a way for ensuring that if it has been created,
# it is then deleted
def create(self, request, body=None, **kwargs):
self._notifier.info(request.context,
self._resource + '.create.start',
body)
return self._create(request, body, **kwargs)
@db_api.retry_db_errors
def _create(self, request, body, **kwargs):
"""Creates a new instance of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
body = Controller.prepare_request_body(request.context,
body, True,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = self._plugin_handlers[self.CREATE]
# Check authz
if self._collection in body:
# Have to account for bulk create
items = body[self._collection]
else:
items = [body]
# Ensure policy engine is initialized
policy.init()
# Store requested resource amounts grouping them by tenant
# This won't work with multiple resources. However because of the
# current structure of this controller there will hardly be more than
# one resource for which reservations are being made
request_deltas = collections.defaultdict(int)
for item in items:
self._validate_network_tenant_ownership(request,
item[self._resource])
policy.enforce(request.context,
action,
item[self._resource],
pluralized=self._collection)
if 'tenant_id' not in item[self._resource]:
# no tenant_id - no quota check
continue
tenant_id = item[self._resource]['tenant_id']
request_deltas[tenant_id] += 1
# Quota enforcement
reservations = []
try:
for (tenant, delta) in request_deltas.items():
reservation = quota.QUOTAS.make_reservation(
request.context,
tenant,
{self._resource: delta},
self._plugin)
reservations.append(reservation)
except n_exc.QuotaResourceUnknown as e:
# We don't want to quota this resource
LOG.debug(e)
def notify(create_result):
# Ensure usage trackers for all resources affected by this API
# operation are marked as dirty
with request.context.session.begin():
# Commit the reservation(s)
for reservation in reservations:
quota.QUOTAS.commit_reservation(
request.context, reservation.reservation_id)
resource_registry.set_resources_dirty(request.context)
notifier_method = self._resource + '.create.end'
self._notifier.info(request.context,
notifier_method,
create_result)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=create_result,
method_name=notifier_method,
collection=self._collection,
action=action, original={})
return create_result
def do_create(body, bulk=False, emulated=False):
kwargs = {self._parent_id_name: parent_id} if parent_id else {}
if bulk and not emulated:
obj_creator = getattr(self._plugin, "%s_bulk" % action)
else:
obj_creator = getattr(self._plugin, action)
try:
if emulated:
return self._emulate_bulk_create(obj_creator, request,
body, parent_id)
else:
if self._collection in body:
# This is weird but fixing it requires changes to the
# plugin interface
kwargs.update({self._collection: body})
else:
kwargs.update({self._resource: body})
return obj_creator(request.context, **kwargs)
except Exception:
# In case of failure the plugin will always raise an
# exception. Cancel the reservation
with excutils.save_and_reraise_exception():
for reservation in reservations:
quota.QUOTAS.cancel_reservation(
request.context, reservation.reservation_id)
if self._collection in body and self._native_bulk:
# plugin does atomic bulk create operations
objs = do_create(body, bulk=True)
# Use first element of list to discriminate attributes which
# should be removed because of authZ policies
fields_to_strip = self._exclude_attributes_by_policy(
request.context, objs[0])
return notify({self._collection: [self._filter_attributes(
request.context, obj, fields_to_strip=fields_to_strip)
for obj in objs]})
else:
if self._collection in body:
# Emulate atomic bulk behavior
objs = do_create(body, bulk=True, emulated=True)
return notify({self._collection: objs})
else:
obj = do_create(body)
return notify({self._resource: self._view(request.context,
obj)})
def delete(self, request, id, **kwargs):
"""Deletes the specified entity."""
if request.body:
msg = _('Request body is not supported in DELETE.')
raise webob.exc.HTTPBadRequest(msg)
self._notifier.info(request.context,
self._resource + '.delete.start',
{self._resource + '_id': id})
return self._delete(request, id, **kwargs)
@db_api.retry_db_errors
def _delete(self, request, id, **kwargs):
action = self._plugin_handlers[self.DELETE]
# Check authz
policy.init()
parent_id = kwargs.get(self._parent_id_name)
obj = self._item(request, id, parent_id=parent_id)
try:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
obj_deleter = getattr(self._plugin, action)
obj_deleter(request.context, id, **kwargs)
# A delete operation usually alters resource usage, so mark affected
# usage trackers as dirty
resource_registry.set_resources_dirty(request.context)
notifier_method = self._resource + '.delete.end'
result = {self._resource: self._view(request.context, obj)}
notifier_payload = {self._resource + '_id': id}
notifier_payload.update(result)
self._notifier.info(request.context,
notifier_method,
notifier_payload)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=result,
method_name=notifier_method, action=action,
original={})
def update(self, request, id, body=None, **kwargs):
"""Updates the specified entity's attributes."""
try:
payload = body.copy()
except AttributeError:
msg = _("Invalid format: %s") % request.body
raise exceptions.BadRequest(resource='body', msg=msg)
payload['id'] = id
self._notifier.info(request.context,
self._resource + '.update.start',
payload)
return self._update(request, id, body, **kwargs)
@db_api.retry_db_errors
def _update(self, request, id, body, **kwargs):
body = Controller.prepare_request_body(request.context,
body, False,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = self._plugin_handlers[self.UPDATE]
# Load object to check authz
# but pass only attributes in the original body and required
# by the policy engine to the policy 'brain'
field_list = [name for (name, value) in six.iteritems(self._attr_info)
if (value.get('required_by_policy') or
value.get('primary_key') or
'default' not in value)]
# Ensure policy engine is initialized
policy.init()
parent_id = kwargs.get(self._parent_id_name)
orig_obj = self._item(request, id, field_list=field_list,
parent_id=parent_id)
orig_object_copy = copy.copy(orig_obj)
orig_obj.update(body[self._resource])
# Make a list of attributes to be updated to inform the policy engine
# which attributes are set explicitly so that it can distinguish them
# from the ones that are set to their default values.
orig_obj[n_const.ATTRIBUTES_TO_UPDATE] = body[self._resource].keys()
try:
policy.enforce(request.context,
action,
orig_obj,
pluralized=self._collection)
except oslo_policy.PolicyNotAuthorized:
with excutils.save_and_reraise_exception() as ctxt:
# If a tenant is modifying its own object, it's safe to return
# a 403. Otherwise, pretend that it doesn't exist to avoid
# giving away information.
orig_obj_tenant_id = orig_obj.get("tenant_id")
if (request.context.tenant_id != orig_obj_tenant_id or
orig_obj_tenant_id is None):
ctxt.reraise = False
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
obj_updater = getattr(self._plugin, action)
kwargs = {self._resource: body}
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj = obj_updater(request.context, id, **kwargs)
# Usually an update operation does not alter resource usage, but as
# there might be side effects it might be worth checking for changes
# in resource usage here as well (e.g: a tenant port is created when a
# router interface is added)
resource_registry.set_resources_dirty(request.context)
result = {self._resource: self._view(request.context, obj)}
notifier_method = self._resource + '.update.end'
self._notifier.info(request.context, notifier_method, result)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=result,
method_name=notifier_method, action=action,
original=orig_object_copy)
return result
@staticmethod
def prepare_request_body(context, body, is_create, resource, attr_info,
allow_bulk=False):
"""Verifies required attributes are in request body.
        Also checks that an attribute is only specified if it is allowed
        for the given operation (create/update).
        Attributes with default values are considered optional.
        The body argument must be the deserialized body.
"""
collection = resource + "s"
if not body:
raise webob.exc.HTTPBadRequest(_("Resource body required"))
LOG.debug("Request body: %(body)s", {'body': body})
try:
if collection in body:
if not allow_bulk:
raise webob.exc.HTTPBadRequest(_("Bulk operation "
"not supported"))
if not body[collection]:
raise webob.exc.HTTPBadRequest(_("Resources required"))
bulk_body = [
Controller.prepare_request_body(
context, item if resource in item
else {resource: item}, is_create, resource, attr_info,
allow_bulk) for item in body[collection]
]
return {collection: bulk_body}
res_dict = body.get(resource)
except (AttributeError, TypeError):
msg = _("Body contains invalid data")
raise webob.exc.HTTPBadRequest(msg)
if res_dict is None:
msg = _("Unable to find '%s' in request body") % resource
raise webob.exc.HTTPBadRequest(msg)
attributes.populate_tenant_id(context, res_dict, attr_info, is_create)
attributes.verify_attributes(res_dict, attr_info)
if is_create: # POST
attributes.fill_default_value(attr_info, res_dict,
webob.exc.HTTPBadRequest)
else: # PUT
for attr, attr_vals in six.iteritems(attr_info):
if attr in res_dict and not attr_vals['allow_put']:
msg = _("Cannot update read-only attribute %s") % attr
raise webob.exc.HTTPBadRequest(msg)
attributes.convert_value(attr_info, res_dict, webob.exc.HTTPBadRequest)
return body
def _validate_network_tenant_ownership(self, request, resource_item):
# TODO(salvatore-orlando): consider whether this check can be folded
# in the policy engine
if (request.context.is_admin or request.context.is_advsvc or
self._resource not in ('port', 'subnet')):
return
network = self._plugin.get_network(
request.context,
resource_item['network_id'])
# do not perform the check on shared networks
if network.get('shared'):
return
network_owner = network['tenant_id']
if network_owner != resource_item['tenant_id']:
# NOTE(kevinbenton): we raise a 404 to hide the existence of the
# network from the tenant since they don't have access to it.
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
def create_resource(collection, resource, plugin, params, allow_bulk=False,
member_actions=None, parent=None, allow_pagination=False,
allow_sorting=False):
controller = Controller(plugin, collection, resource, params, allow_bulk,
member_actions=member_actions, parent=parent,
allow_pagination=allow_pagination,
allow_sorting=allow_sorting)
return wsgi_resource.Resource(controller, FAULT_MAP)
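# Usage sketch (hypothetical plugin and attribute map):
#   resource = create_resource('ports', 'port', plugin, PORT_ATTR_MAP,
#                              allow_bulk=True, allow_pagination=True)
# The returned Resource dispatches to the Controller and translates
# plugin exceptions to HTTP errors via FAULT_MAP.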
|
igor-toga/local-snat
|
neutron/api/v2/base.py
|
Python
|
apache-2.0
| 33,385
|
import logging
import os
import json
import shutil
import sys
import datetime
import csv, math
from tld import get_tld
from collections import OrderedDict
from utils import Util
from components.data.data import Data
from components.iana.iana_transform import IanaTransform
from components.nc.network_context import NetworkContext
from multiprocessing import Process
import pandas as pd
import time
class OA(object):
def __init__(self,date,limit=500,logger=None):
self._initialize_members(date,limit,logger)
def _initialize_members(self,date,limit,logger):
        # get logger if it exists; if not, create a new instance.
self._logger = logging.getLogger('OA.DNS') if logger else Util.get_logger('OA.DNS',create_file=False)
# initialize required parameters.
self._scrtip_path = os.path.dirname(os.path.abspath(__file__))
self._date = date
self._table_name = "dns"
self._dns_results = []
self._limit = limit
self._data_path = None
self._ipynb_path = None
self._ingest_summary_path = None
self._dns_scores = []
self._dns_scores_headers = []
self._results_delimiter = '\t'
self._details_limit = 250
# get app configuration.
self._spot_conf = Util.get_spot_conf()
# get scores fields conf
conf_file = "{0}/dns_conf.json".format(self._scrtip_path)
self._conf = json.loads(open (conf_file).read(),object_pairs_hook=OrderedDict)
# initialize data engine
self._db = self._spot_conf.get('conf', 'DBNAME').replace("'", "").replace('"', '')
self._engine = Data(self._db,self._table_name ,self._logger)
def start(self):
####################
start = time.time()
####################
self._create_folder_structure()
self._add_ipynb()
self._get_dns_results()
self._add_tld_column()
self._add_reputation()
self._add_hh_and_severity()
self._add_iana()
self._add_network_context()
self._create_dns_scores_csv()
self._get_oa_details()
self._ingest_summary()
##################
end = time.time()
print(end - start)
##################
def _create_folder_structure(self):
# create date folder structure if it does not exist.
self._logger.info("Creating folder structure for OA (data and ipynb)")
self._data_path,self._ingest_summary_path,self._ipynb_path = Util.create_oa_folders("dns",self._date)
def _add_ipynb(self):
if os.path.isdir(self._ipynb_path):
self._logger.info("Adding edge investigation IPython Notebook")
shutil.copy("{0}/ipynb_templates/Edge_Investigation_master.ipynb".format(self._scrtip_path),"{0}/Edge_Investigation.ipynb".format(self._ipynb_path))
self._logger.info("Adding threat investigation IPython Notebook")
shutil.copy("{0}/ipynb_templates/Threat_Investigation_master.ipynb".format(self._scrtip_path),"{0}/Threat_Investigation.ipynb".format(self._ipynb_path))
else:
self._logger.error("There was a problem adding the IPython Notebooks, please check the directory exists.")
def _get_dns_results(self):
self._logger.info("Getting {0} Machine Learning Results from HDFS".format(self._date))
dns_results = "{0}/dns_results.csv".format(self._data_path)
# get hdfs path from conf file.
HUSER = self._spot_conf.get('conf', 'HUSER').replace("'", "").replace('"', '')
hdfs_path = "{0}/dns/scored_results/{1}/scores/dns_results.csv".format(HUSER,self._date)
# get results file from hdfs.
get_command = Util.get_ml_results_form_hdfs(hdfs_path,self._data_path)
self._logger.info("{0}".format(get_command))
        # validate the results file exists
if os.path.isfile(dns_results):
            # read the number of results based on the limit specified.
self._logger.info("Reading {0} dns results file: {1}".format(self._date,dns_results))
self._dns_results = Util.read_results(dns_results,self._limit,self._results_delimiter)[:]
            if len(self._dns_results) == 0: self._logger.error("There are no DNS results.");sys.exit(1)
else:
self._logger.error("There was an error getting ML results from HDFS")
sys.exit(1)
# add headers.
self._logger.info("Adding headers")
self._dns_scores_headers = [ str(key) for (key,value) in self._conf['dns_score_fields'].items() ]
# add dns content.
self._dns_scores = [ conn[:] for conn in self._dns_results][:]
def _move_time_stamp(self,dns_data):
for dns in dns_data:
time_stamp = dns[1]
dns.remove(time_stamp)
dns.append(time_stamp)
return dns_data
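    # Worked example (hypothetical row): [score, ts, query] becomes
    # [score, query, ts] -- the timestamp at index 1 is moved to the end
    # of each row.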
def _create_dns_scores_csv(self):
dns_scores_csv = "{0}/dns_scores.csv".format(self._data_path)
dns_scores_final = self._move_time_stamp(self._dns_scores)
dns_scores_final.insert(0,self._dns_scores_headers)
Util.create_csv_file(dns_scores_csv,dns_scores_final)
# create bk file
dns_scores_bu_csv = "{0}/dns_scores_bu.csv".format(self._data_path)
Util.create_csv_file(dns_scores_bu_csv,dns_scores_final)
def _add_tld_column(self):
qry_name_col = self._conf['dns_results_fields']['dns_qry_name']
        self._dns_scores = [
            conn + [get_tld("http://" + str(conn[qry_name_col]), fail_silently=True)
                    if "http://" not in str(conn[qry_name_col])
                    else get_tld(str(conn[qry_name_col]), fail_silently=True)]
            for conn in self._dns_scores
        ]
def _add_reputation(self):
# read configuration.
reputation_conf_file = "{0}/components/reputation/reputation_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
self._logger.info("Reading reputation configuration file: {0}".format(reputation_conf_file))
rep_conf = json.loads(open(reputation_conf_file).read())
# initialize reputation services.
self._rep_services = []
self._logger.info("Initializing reputation services.")
for service in rep_conf:
config = rep_conf[service]
module = __import__("components.reputation.{0}.{0}".format(service), fromlist=['Reputation'])
self._rep_services.append(module.Reputation(config,self._logger))
# get columns for reputation.
rep_cols = {}
indexes = [ int(value) for key, value in self._conf["add_reputation"].items()]
self._logger.info("Getting columns to add reputation based on config file: dns_conf.json".format())
for index in indexes:
col_list = []
for conn in self._dns_scores:
col_list.append(conn[index])
rep_cols[index] = list(set(col_list))
# get reputation per column.
self._logger.info("Getting reputation for each service in config")
rep_services_results = []
if self._rep_services :
for key,value in rep_cols.items():
rep_services_results = [ rep_service.check(None,value) for rep_service in self._rep_services]
rep_results = {}
for result in rep_services_results:
rep_results = {k: "{0}::{1}".format(rep_results.get(k, ""), result.get(k, "")).strip('::') for k in set(rep_results) | set(result)}
self._dns_scores = [ conn + [ rep_results[conn[key]] ] for conn in self._dns_scores ]
else:
self._dns_scores = [ conn + [""] for conn in self._dns_scores ]
def _add_hh_and_severity(self):
# add hh value and sev columns.
dns_date_index = self._conf["dns_results_fields"]["frame_time"]
self._dns_scores = [conn + [ filter(None,conn[dns_date_index].split(" "))[3].split(":")[0]] + [0] + [0] for conn in self._dns_scores ]
def _add_iana(self):
iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(iana_conf_file):
iana_config = json.loads(open(iana_conf_file).read())
dns_iana = IanaTransform(iana_config["IANA"])
dns_qry_class_index = self._conf["dns_results_fields"]["dns_qry_class"]
dns_qry_type_index = self._conf["dns_results_fields"]["dns_qry_type"]
dns_qry_rcode_index = self._conf["dns_results_fields"]["dns_qry_rcode"]
            self._dns_scores = [conn
                                + [dns_iana.get_name(conn[dns_qry_class_index], "dns_qry_class")]
                                + [dns_iana.get_name(conn[dns_qry_type_index], "dns_qry_type")]
                                + [dns_iana.get_name(conn[dns_qry_rcode_index], "dns_qry_rcode")]
                                for conn in self._dns_scores]
else:
self._dns_scores = [ conn + ["","",""] for conn in self._dns_scores ]
def _add_network_context(self):
nc_conf_file = "{0}/components/nc/nc_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(nc_conf_file):
nc_conf = json.loads(open(nc_conf_file).read())["NC"]
dns_nc = NetworkContext(nc_conf,self._logger)
ip_dst_index = self._conf["dns_results_fields"]["ip_dst"]
self._dns_scores = [ conn + [dns_nc.get_nc(conn[ip_dst_index])] for conn in self._dns_scores ]
else:
self._dns_scores = [ conn + [""] for conn in self._dns_scores ]
def _get_oa_details(self):
self._logger.info("Getting OA DNS suspicious details/chord diagram")
# start suspicious connects details process.
p_sp = Process(target=self._get_suspicious_details)
p_sp.start()
# start chord diagram process.
p_dn = Process(target=self._get_dns_dendrogram)
p_dn.start()
p_sp.join()
p_dn.join()
def _get_suspicious_details(self):
iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(iana_conf_file):
iana_config = json.loads(open(iana_conf_file).read())
dns_iana = IanaTransform(iana_config["IANA"])
for conn in self._dns_scores:
# get data to query
date=conn[self._conf["dns_score_fields"]["frame_time"]].split(" ")
date = filter(None,date)
if len(date) == 5:
year=date[2]
month=datetime.datetime.strptime(date[0], '%b').strftime('%m')
day=date[1]
hh=conn[self._conf["dns_score_fields"]["hh"]]
dns_qry_name = conn[self._conf["dns_score_fields"]["dns_qry_name"]]
self._get_dns_details(dns_qry_name,year,month,day,hh,dns_iana)
def _get_dns_details(self,dns_qry_name,year,month,day,hh,dns_iana):
limit = self._details_limit
edge_file ="{0}/edge-{1}_{2}_00.csv".format(self._data_path,dns_qry_name.replace("/","-"),hh)
edge_tmp ="{0}/edge-{1}_{2}_00.tmp".format(self._data_path,dns_qry_name.replace("/","-"),hh)
if not os.path.isfile(edge_file):
dns_qry = ("SELECT frame_time,frame_len,ip_dst,ip_src,dns_qry_name,dns_qry_class,dns_qry_type,dns_qry_rcode,dns_a FROM {0}.{1} WHERE y={2} AND m={3} AND d={4} AND dns_qry_name LIKE '%{5}%' AND h={6} LIMIT {7};").format(self._db,self._table_name,year,month,day,dns_qry_name,hh,limit)
# execute query
try:
self._engine.query(dns_qry,edge_tmp)
except:
self._logger.error("ERROR. Edge file couldn't be created for {0}, skipping this step".format(dns_qry_name))
else:
# add IANA to results.
if dns_iana:
update_rows = []
self._logger.info("Adding IANA translation to details results")
with open(edge_tmp) as dns_details_csv:
rows = csv.reader(dns_details_csv, delimiter=',', quotechar='|')
try:
next(rows)
                            update_rows = [[conn[0], conn[1], conn[2], conn[3], conn[4],
                                            dns_iana.get_name(conn[5], "dns_qry_class"),
                                            dns_iana.get_name(conn[6], "dns_qry_type"),
                                            dns_iana.get_name(conn[7], "dns_qry_rcode"),
                                            conn[8]] for conn in rows]
update_rows = filter(None, update_rows)
header = [ "frame_time", "frame_len", "ip_dst","ip_src","dns_qry_name","dns_qry_class_name","dns_qry_type_name","dns_qry_rcode_name","dns_a" ]
update_rows.insert(0,header)
except IndexError:
pass
else:
self._logger.info("WARNING: NO IANA configured.")
# create edge file.
self._logger.info("Creating edge file:{0}".format(edge_file))
with open(edge_file,'wb') as dns_details_edge:
writer = csv.writer(dns_details_edge, quoting=csv.QUOTE_ALL)
if update_rows:
writer.writerows(update_rows)
else:
shutil.copy(edge_tmp,edge_file)
os.remove(edge_tmp)
def _get_dns_dendrogram(self):
limit = self._details_limit
for conn in self._dns_scores:
date=conn[self._conf["dns_score_fields"]["frame_time"]].split(" ")
date = filter(None,date)
if len(date) == 5:
year=date[2]
month=datetime.datetime.strptime(date[0], '%b').strftime('%m')
day=date[1]
ip_dst=conn[self._conf["dns_score_fields"]["ip_dst"]]
self._get_dendro(self._db,self._table_name,ip_dst,year,month,day, limit)
def _get_dendro(self,db,table,ip_dst,year,month,day,limit):
dendro_file = "{0}/dendro-{1}.csv".format(self._data_path,ip_dst)
if not os.path.isfile(dendro_file):
dndro_qry = ("SELECT dns_a, dns_qry_name, ip_dst FROM (SELECT susp.ip_dst, susp.dns_qry_name, susp.dns_a FROM {0}.{1} as susp WHERE susp.y={2} AND susp.m={3} AND susp.d={4} AND susp.ip_dst='{5}' LIMIT {6}) AS tmp GROUP BY dns_a, dns_qry_name, ip_dst").format(db,table,year,month,day,ip_dst,limit)
# execute query
self._engine.query(dndro_qry,dendro_file)
def _ingest_summary(self):
# get date parameters.
yr = self._date[:4]
mn = self._date[4:6]
dy = self._date[6:]
self._logger.info("Getting ingest summary data for the day")
ingest_summary_cols = ["date","total"]
result_rows = []
df_filtered = pd.DataFrame()
ingest_summary_file = "{0}/is_{1}{2}.csv".format(self._ingest_summary_path,yr,mn)
ingest_summary_tmp = "{0}.tmp".format(ingest_summary_file)
if os.path.isfile(ingest_summary_file):
df = pd.read_csv(ingest_summary_file, delimiter=',')
#discards previous rows from the same date
df_filtered = df[df['date'].str.contains("{0}-{1}-{2}".format(yr, mn, dy)) == False]
else:
df = pd.DataFrame()
# get ingest summary.
ingest_summary_qry = ("SELECT frame_time, COUNT(*) as total "
" FROM {0}.{1}"
" WHERE y={2} AND m={3} AND d={4} "
" AND unix_tstamp IS NOT NULL AND frame_time IS NOT NULL"
" AND frame_len IS NOT NULL AND dns_qry_name IS NOT NULL"
" AND ip_src IS NOT NULL "
" AND (dns_qry_class IS NOT NULL AND dns_qry_type IS NOT NULL AND dns_qry_rcode IS NOT NULL ) "
" GROUP BY frame_time;")
ingest_summary_qry = ingest_summary_qry.format(self._db,self._table_name, yr, mn, dy)
results_file = "{0}/results_{1}.csv".format(self._ingest_summary_path,self._date)
self._engine.query(ingest_summary_qry,output_file=results_file,delimiter=",")
if os.path.isfile(results_file):
            df_results = pd.read_csv(results_file, delimiter=',')
            # Form a new dataframe, extracting hour and minute from the time column.
            df_new = pd.DataFrame(
                [["{0}-{1}-{2} {3}:{4}".format(yr, mn, dy,
                                               val['frame_time'].split(" ")[3].split(":")[0].zfill(2),
                                               val['frame_time'].split(" ")[3].split(":")[1].zfill(2)),
                  int(val['total']) if not math.isnan(val['total']) else 0]
                 for key, val in df_results.iterrows()],
                columns=ingest_summary_cols)
#Groups the data by minute
sf = df_new.groupby(by=['date'])['total'].sum()
df_per_min = pd.DataFrame({'date':sf.index, 'total':sf.values})
df_final = df_filtered.append(df_per_min, ignore_index=True)
df_final.to_csv(ingest_summary_tmp,sep=',', index=False)
os.remove(results_file)
os.rename(ingest_summary_tmp,ingest_summary_file)
else:
self._logger.info("No data found for the ingest summary")
|
kpeiruza/incubator-spot
|
spot-oa/oa/dns/dns_oa.py
|
Python
|
apache-2.0
| 17,663
|
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
from app import db
import os.path
db.create_all()
if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository')
api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
else:
api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, api.version(SQLALCHEMY_MIGRATE_REPO))
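# Usage sketch (not part of the original script): once the repository exists,
# later schema changes would typically be applied through the same
# migrate.versioning API, e.g.
#
#     api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
#     print api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)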
|
edfungus/Music-Server
|
app/db_create.py
|
Python
|
apache-2.0
| 466
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ComputeTimeCursor
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-pubsublite
# [START pubsublite_v1_generated_TopicStatsService_ComputeTimeCursor_async]
from google.cloud import pubsublite_v1
async def sample_compute_time_cursor():
# Create a client
client = pubsublite_v1.TopicStatsServiceAsyncClient()
# Initialize request argument(s)
request = pubsublite_v1.ComputeTimeCursorRequest(
topic="topic_value",
partition=986,
)
# Make the request
response = await client.compute_time_cursor(request=request)
# Handle the response
print(response)
# [END pubsublite_v1_generated_TopicStatsService_ComputeTimeCursor_async]
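# Usage sketch (not part of the generated snippet): the coroutine above has to
# be driven by an event loop, e.g. with the standard library on Python 3.7+:
#
#     import asyncio
#     asyncio.run(sample_compute_time_cursor())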
|
googleapis/python-pubsublite
|
samples/generated_samples/pubsublite_v1_generated_topic_stats_service_compute_time_cursor_async.py
|
Python
|
apache-2.0
| 1,540
|
import unittest
from biothings_explorer.registry import Registry
from biothings_explorer.user_query_dispatcher import SingleEdgeQueryDispatcher
from .utils import get_apis
reg = Registry()
class TestSingleHopQuery(unittest.TestCase):
def test_disease2protein(self):
"""Test gene-protein"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Protein",
input_cls="Disease",
input_id="DOID",
pred="related_to",
output_id="PR",
values="DOID:12143",
)
seqd.query()
self.assertTrue("PR:000007572" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["PR:000007572"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2genomicentity(self):
"""Test gene-protein"""
seqd = SingleEdgeQueryDispatcher(
output_cls="GenomicEntity",
input_cls="Disease",
pred="related_to",
input_id="DOID",
output_id="SO",
values="DOID:12143",
)
seqd.query()
self.assertTrue("SO:0000999" in seqd.G)
self.assertTrue("SO:0001853" in seqd.G)
def test_disease2chemicalsubstance(self):
"""Test gene-genomic entity"""
seqd = SingleEdgeQueryDispatcher(
output_cls="ChemicalSubstance",
input_cls="Disease",
input_id="DOID",
values="DOID:12143",
output_id="CHEBI",
)
seqd.query()
self.assertTrue("CHEBI:65349" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["CHEBI:65349"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2gene(self):
"""Test gene-gene"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Gene", input_cls="Disease", input_id="DOID", values="DOID:12143"
)
seqd.query()
self.assertTrue("DHDDS" in seqd.G)
self.assertTrue("RPL3" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["DHDDS"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2anatomy(self):
"""Test gene-anatomy"""
seqd = SingleEdgeQueryDispatcher(
output_cls="AnatomicalEntity",
input_cls="Disease",
input_id="DOID",
output_id="UBERON",
values="DOID:12143",
)
seqd.query()
self.assertTrue("UBERON:0007023" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["UBERON:0007023"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2ma(self):
"""Test gene-molecular_activity"""
seqd = SingleEdgeQueryDispatcher(
output_cls="MolecularActivity",
input_cls="Disease",
input_id="DOID",
output_id="GO",
values="DOID:12143",
)
seqd.query()
self.assertTrue("GO:0004935" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["GO:0004935"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2bp(self):
"""Test gene-biological_process"""
seqd = SingleEdgeQueryDispatcher(
output_cls="BiologicalProcess",
input_cls="Disease",
input_id="DOID",
values="DOID:12143",
output_id="GO",
)
seqd.query()
self.assertTrue("GO:0007605" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["GO:0007605"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2cc(self):
"""Test gene-cellular_component"""
seqd = SingleEdgeQueryDispatcher(
output_cls="CellularComponent",
input_cls="Disease",
input_id="DOID",
output_id="GO",
values="DOID:0001816",
)
seqd.query()
self.assertTrue("GO:0030017" in seqd.G)
edges = seqd.G["DOID:DOID:0001816"]["GO:0030017"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2cell(self):
"""Test gene-cell"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Cell",
input_cls="Disease",
input_id="DOID",
output_id="CL",
values="DOID:12143",
)
seqd.query()
self.assertTrue("CL:0000731" in seqd.G)
def test_disease2disease(self):
"""Test gene-disease"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Disease",
input_cls="Disease",
input_id="DOID",
output_id="DOID",
values="DOID:12143",
)
seqd.query()
self.assertTrue("DOID:225" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["DOID:225"]
self.assertTrue("CORD Disease API" in get_apis(edges))
|
biothings/biothings_explorer
|
tests/test_apis/test_corddisease.py
|
Python
|
apache-2.0
| 4,836
|
# coding=utf-8
#
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""iWorkflow® Device Groups (shared) module for CM Cloud Managed Devices
REST URI
``http://localhost/mgmt/shared/resolver/device-groups/cm-cloud-managed-devices``
"""
from f5.iworkflow.resource import Collection
from f5.iworkflow.resource import OrganizingCollection
from f5.iworkflow.resource import Resource
class Cm_Cloud_Managed_Devices(OrganizingCollection):
def __init__(self, device_groups):
super(Cm_Cloud_Managed_Devices, self).__init__(device_groups)
self._meta_data['required_json_kind'] = \
'shared:resolver:device-groups:devicegroupstate'
self._meta_data['attribute_registry'] = {
'cm:shared:licensing:pools:licensepoolmembercollectionstate':
Devices_s
}
self._meta_data['allowed_lazy_attributes'] = [
Devices_s
]
class Devices_s(Collection):
def __init__(self, cm_cloud_managed_devices):
super(Devices_s, self).__init__(cm_cloud_managed_devices)
self._meta_data['allowed_lazy_attributes'] = [Device]
self._meta_data['required_json_kind'] = \
'shared:resolver:device-groups:devicegroupdevicecollectionstate'
self._meta_data['attribute_registry'] = {
'shared:resolver:device-groups:restdeviceresolverdevicestate': Device # NOQA
}
class Device(Resource):
def __init__(self, devices_s):
super(Device, self).__init__(devices_s)
self._meta_data['required_json_kind'] = \
'shared:resolver:device-groups:restdeviceresolverdevicestate'
self._meta_data['required_creation_parameters'] = {
'address', 'password', 'userName'}
self._meta_data['required_load_parameters'] = {'uuid', }
|
F5Networks/f5-common-python
|
f5/iworkflow/shared/resolver/device_groups/cm_cloud_managed_devices.py
|
Python
|
apache-2.0
| 2,330
|
#!/usr/bin/env python
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
from netmiko import ConnectHandler
from getpass import getpass
from routers import pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1
def main():
'''
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
'''
ip_address = raw_input("Please enter IP: ")
password = getpass()
pynet_rtr1['ip'] = ip_address
pynet_rtr2['ip'] = ip_address
pynet_jnpr_srx1['ip'] = ip_address
pynet_rtr1['password'] = password
pynet_rtr2['password'] = password
pynet_jnpr_srx1['password'] = password
#for each router send show arp command and print result
for router in (pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1):
ssh_conn = ConnectHandler(verbose=False, **router)
output = ssh_conn.send_command('show arp')
print ">>> {}: \n".format(ssh_conn.ip)
print output
print ">>>\n"
if __name__ == '__main__':
main()
|
adleff/python_ansible
|
class4/ex6.py
|
Python
|
apache-2.0
| 1,008
|
# Copyright 2012 majgis Contributors
#
# Individuals comprising majgis Contributors are identified in
# the NOTICE file found in the root directory of this project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# or
# in the file named LICENSE in the root directory of this project.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" From a list of folders, export msd documents from existing mxds, ArcGIS 10
"""
import os
from glob import glob
from arcpy import mapping
from symbologyFromArcMapDoc import MxdExtras
folders = [r'F:\Projects\NationalAtlas\ArcGIS_Server\Server', r'F:\Projects\NationalAtlas\ArcGIS_Server\Server\biodiversity']
searchPattern = '*.mxd'
ignores = ['Overlap']
tempMsg = "{0:>90} -> {1}"
newMsg = "TABLE: {0} FIELD: {1}"
mxdSuffix = ".mxd"
msdSuffix = ".msd"
for folder in folders:
mxdPaths = glob(os.path.join(folder, searchPattern))
for mxdPath in mxdPaths:
mxd = mapping.MapDocument(mxdPath)
lyrs = mapping.ListLayers(mxd)
mxde = MxdExtras(mxdPath)
msdPath = mxdPath.replace(mxdSuffix, msdSuffix)
for lyr in lyrs:
lyre = mxde[lyr.name]
joinTable = lyre.joinedTableName
joinField = lyre.symbologyShortFieldName
if joinTable:
newName = newMsg.format(joinTable, joinField)
else:
newName = lyr.name
#print tempMsg.format(lyr.name, newName)
lyr.name = newName
mxd.save()
#delete existing msd
if os.path.exists(msdPath):
os.remove(msdPath)
#export msd
mapping.ConvertToMSD(mxd,msdPath)
print msdPath
|
majgis/majgis
|
experiments/iterateArcMapDocsByFolder.py
|
Python
|
apache-2.0
| 2,315
|
# -*- coding: utf-8 -*-
"""
gspread
~~~~~~~
Google Spreadsheets client library.
"""
__version__ = '0.2.1'
__author__ = 'Anton Burnashev'
from .client import Client, login
from .models import Spreadsheet, Worksheet, Cell
from .exceptions import (GSpreadException, AuthenticationError,
SpreadsheetNotFound, NoValidUrlKeyFound,
IncorrectCellLabel, WorksheetNotFound,
UpdateCellError, RequestError)
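# Usage sketch (credentials and spreadsheet name are placeholders) for the
# client exported above:
#
#     import gspread
#     gc = gspread.login('user@example.com', 'password')
#     sh = gc.open('My spreadsheet')
#     print sh.sheet1.cell(1, 1).value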
|
CPedrini/TateTRES
|
gspread/__init__.py
|
Python
|
apache-2.0
| 475
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2010 Anso Labs, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`nova` -- Cloud IaaS Platform
===================================
.. automodule:: nova
:platform: Unix
:synopsis: Infrastructure-as-a-Service Cloud platform.
.. moduleauthor:: Jesse Andrews <jesse@ansolabs.com>
.. moduleauthor:: Devin Carlen <devin.carlen@gmail.com>
.. moduleauthor:: Vishvananda Ishaya <vishvananda@yahoo.com>
.. moduleauthor:: Joshua McKenty <joshua@cognition.ca>
.. moduleauthor:: Manish Singh <yosh@gimp.org>
.. moduleauthor:: Andy Smith <andy@anarkystic.com>
"""
from exception import *
|
sorenh/cc
|
nova/__init__.py
|
Python
|
apache-2.0
| 1,336
|
# -*- coding: utf-8 -*-
import pytest
# ``py.test --runslow`` causes the entire testsuite to be run, including tests
# that are decorated with ``@slow`` (scaffolding tests).
# see http://pytest.org/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option # Noqa
## def pytest_addoption(parser):
## parser.addoption("--runslow", action="store_true", help="run slow tests")
## slow = pytest.mark.skipif(
## not pytest.config.getoption("--runslow"),
## reason="need --runslow option to run"
## )
|
bird-house/pyramid-phoenix
|
phoenix/tests/conftest.py
|
Python
|
apache-2.0
| 542
|
from steps.bdd_test_util import cli_call
def after_scenario(context, scenario):
if 'doNotDecompose' in scenario.tags:
print("Not going to decompose after scenario {0}, with yaml '{1}'".format(scenario.name, context.compose_yaml))
else:
if 'compose_yaml' in context:
print("Decomposing with yaml '{0}' after scenario {1}, ".format(context.compose_yaml, scenario.name))
context.compose_output, context.compose_error, context.compose_returncode = \
cli_call(context, ["docker-compose", "-f", context.compose_yaml, "kill"], expect_success=True)
context.compose_output, context.compose_error, context.compose_returncode = \
cli_call(context, ["docker-compose", "-f", context.compose_yaml, "rm","-f"], expect_success=True)
# now remove any other containers (chaincodes)
context.compose_output, context.compose_error, context.compose_returncode = \
cli_call(context, ["docker", "ps", "-qa"], expect_success=True)
if context.compose_returncode == 0:
# Remove each container
for containerId in context.compose_output.splitlines():
#print("docker rm {0}".format(containerId))
context.compose_output, context.compose_error, context.compose_returncode = \
cli_call(context, ["docker", "rm", containerId], expect_success=True)
|
ghaskins/obc-peer
|
openchain/peer/bddtests/environment.py
|
Python
|
apache-2.0
| 1,357
|
# -*- coding: utf-8 -*-
#
# Copyright(c) 2010 poweredsites.org
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import urllib
from tornado.web import HTTPError
from tornado.options import options
from poweredsites.libs import cache # cache decorator alias
def admin(method):
"""Decorate with this method to restrict to site admins."""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.current_user:
if self.request.method == "GET":
url = self.get_login_url()
if "?" not in url:
url += "?" + urllib.urlencode(dict(next=self.request.full_url()))
self.redirect(url)
return
raise HTTPError(403)
elif not self.is_admin:
if self.request.method == "GET":
self.redirect(options.home_url)
return
raise HTTPError(403)
else:
return method(self, *args, **kwargs)
return wrapper
def staff(method):
"""Decorate with this method to restrict to site staff."""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.current_user:
if self.request.method == "GET":
url = self.get_login_url()
if "?" not in url:
url += "?" + urllib.urlencode(dict(next=self.request.full_url()))
self.redirect(url)
return
raise HTTPError(403)
elif not self.is_staff:
if self.request.method == "GET":
self.redirect(options.home_url)
return
raise HTTPError(403)
else:
return method(self, *args, **kwargs)
return wrapper
def authenticated(method):
"""Decorate methods with this to require that the user be logged in.
Fix the redirect url with full_url.
Tornado use uri by default.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.current_user:
if self.request.method == "GET":
url = self.get_login_url()
if "?" not in url:
url += "?" + urllib.urlencode(dict(next=self.request.full_url()))
self.redirect(url)
return
raise HTTPError(403)
return method(self, *args, **kwargs)
return wrapper
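# Usage sketch (hypothetical handler; assumes a tornado.web.RequestHandler
# subclass exposing current_user, is_admin and is_staff, which is what the
# decorators above rely on):
#
#     class AdminDashboardHandler(BaseHandler):
#         @admin
#         def get(self):
#             self.render("admin/dashboard.html")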
|
felinx/poweredsites
|
poweredsites/libs/decorators.py
|
Python
|
apache-2.0
| 3,016
|
# -*- coding: utf-8 -*-
"""
Problem Statement
Samantha and Sam are playing a game. They have 'N' balls in front of them, each ball numbered from 0 to 9, except the
first ball which is numbered from 1 to 9. Samantha calculates all the sub-strings of the number thus formed, one by one.
If the sub-string is S, Sam has to throw 'S' candies into an initially empty box. At the end of the game, Sam has to
find out the total number of candies in the box, T. As T can be large, Samantha asks Sam to tell T % (10**9 + 7) instead.
If Sam answers correctly, he can keep all the candies. Sam can't take all this Maths and asks for your help.
"""
__author__ = 'Danyang'
MOD = 10 ** 9 + 7
class Solution(object):
def solve_TLE(self, cipher):
"""
O(N^2)
:param cipher: the cipher
"""
A = map(int, list(cipher))
f = A[0]
num = A[0]
sig = 1
for i in xrange(1, len(A)):
num = 10 * num + A[i]
sig *= 10
temp = num
temp_sig = sig
while temp_sig >= 1:
f += temp
f %= MOD
temp %= temp_sig
temp_sig /= 10
return int(f)
def solve(self, cipher):
"""
O(N)
example: 1234
1
12, 2
123, 23, 3
1234, 234, 34, 4
:param cipher:
:return:
"""
pre = [0 for _ in cipher]
pre[0] = int(cipher[0])
for i in xrange(1, len(cipher)):
pre[i] = (pre[i - 1] * 10 + int(cipher[i]) * (i + 1)) % MOD
s = 0
for elt in pre:
s = (s + elt) % MOD
return int(s)
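    # Worked check of the recurrence above for cipher "1234":
    #   pre[0] = 1
    #   pre[1] = 1*10  + 2*2 = 14    (12 + 2)
    #   pre[2] = 14*10 + 3*3 = 149   (123 + 23 + 3)
    #   pre[3] = 149*10 + 4*4 = 1506 (1234 + 234 + 34 + 4)
    #   answer = 1 + 14 + 149 + 1506 = 1670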
if __name__ == "__main__":
import sys
f = open("0.in", "r")
# f = sys.stdin
solution = Solution()
# construct cipher
cipher = f.readline().strip()
# solve
s = "%s\n" % (solution.solve(cipher))
print s,
|
algorhythms/HackerRankAlgorithms
|
Sam and sub-strings.py
|
Python
|
apache-2.0
| 1,919
|
from functools import update_wrapper
from django.http import Http404, HttpResponseRedirect
from django.contrib.admin import ModelAdmin, actions
from django.contrib.admin.forms import AdminAuthenticationForm
from django.contrib.auth import logout as auth_logout, REDIRECT_FIELD_NAME
from django.contrib.contenttypes import views as contenttype_views
from django.views.decorators.csrf import csrf_protect
from django.db.models.base import ModelBase
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from django.views.decorators.cache import never_cache
from django.conf import settings
LOGIN_FORM_KEY = 'this_is_the_login_form'
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class AdminSite(object):
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
"""
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
def __init__(self, name='admin', app_name='admin'):
self._registry = {} # model_class class -> admin_class instance
self.name = name
self.app_name = app_name
self._actions = {'delete_selected': actions.delete_selected}
self._global_actions = self._actions.copy()
def register(self, model_or_iterable, admin_class=None, **options):
"""
Registers the given model(s) with the given admin class.
The model(s) should be Model classes, not instances.
If an admin class isn't given, it will use ModelAdmin (the default
admin options). If keyword arguments are given -- e.g., list_display --
they'll be applied as options to the admin class.
If a model is already registered, this will raise AlreadyRegistered.
If a model is abstract, this will raise ImproperlyConfigured.
"""
if not admin_class:
admin_class = ModelAdmin
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model._meta.abstract:
raise ImproperlyConfigured('The model %s is abstract, so it '
'cannot be registered with admin.' % model.__name__)
if model in self._registry:
raise AlreadyRegistered('The model %s is already registered' % model.__name__)
# Ignore the registration if the model has been
# swapped out.
if not model._meta.swapped:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)
if admin_class is not ModelAdmin and settings.DEBUG:
admin_class.validate(model)
# Instantiate the admin class to save in the registry
self._registry[model] = admin_class(model, self)
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered('The model %s is not registered' % model.__name__)
del self._registry[model]
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raises KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
Explicitly get a registered global action whether it's enabled or
not. Raises KeyError for invalid names.
"""
return self._global_actions[name]
@property
def actions(self):
"""
Get all the enabled actions as an iterable of (name, func).
"""
return six.iteritems(self._actions)
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def check_dependencies(self):
"""
Check that all things needed to run the admin have been correctly installed.
The default implementation checks that LogEntry, ContentType and the
auth context processor are installed.
"""
from django.contrib.admin.models import LogEntry
from django.contrib.contenttypes.models import ContentType
if not LogEntry._meta.installed:
raise ImproperlyConfigured("Put 'django.contrib.admin' in your "
"INSTALLED_APPS setting in order to use the admin application.")
if not ContentType._meta.installed:
raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in "
"your INSTALLED_APPS setting in order to use the admin application.")
if not ('django.contrib.auth.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS or
'django.core.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS):
raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' "
"in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.")
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import patterns, url
urls = super(MyAdminSite, self).get_urls()
urls += patterns('',
url(r'^my_view/$', self.admin_view(some_view))
)
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if LOGIN_FORM_KEY in request.POST and request.user.is_authenticated():
auth_logout(request)
if not self.has_permission(request):
if request.path == reverse('admin:logout',
current_app=self.name):
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
return self.login(request)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
# We add csrf_protect here so this function can be used as a utility
# function for any view, without having to repeat 'csrf_protect'.
if not getattr(view, 'csrf_exempt', False):
inner = csrf_protect(inner)
return update_wrapper(inner, view)
def get_urls(self):
from django.conf.urls import patterns, url, include
if settings.DEBUG:
self.check_dependencies()
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = patterns('',
url(r'^$',
wrap(self.index),
name='index'),
url(r'^logout/$',
wrap(self.logout),
name='logout'),
url(r'^password_change/$',
wrap(self.password_change, cacheable=True),
name='password_change'),
url(r'^password_change/done/$',
wrap(self.password_change_done, cacheable=True),
name='password_change_done'),
url(r'^jsi18n/$',
wrap(self.i18n_javascript, cacheable=True),
name='jsi18n'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$',
wrap(contenttype_views.shortcut),
name='view_on_site'),
url(r'^(?P<app_label>\w+)/$',
wrap(self.app_index),
name='app_list')
)
# Add in each model's views.
for model, model_admin in six.iteritems(self._registry):
urlpatterns += patterns('',
url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name),
include(model_admin.urls))
)
return urlpatterns
@property
def urls(self):
return self.get_urls(), self.app_name, self.name
def password_change(self, request):
"""
Handles the "change password" task -- both form display and validation.
"""
from django.contrib.auth.views import password_change
url = reverse('admin:password_change_done', current_app=self.name)
defaults = {
'current_app': self.name,
'post_change_redirect': url
}
if self.password_change_template is not None:
defaults['template_name'] = self.password_change_template
return password_change(request, **defaults)
def password_change_done(self, request, extra_context=None):
"""
Displays the "success" page after a password change.
"""
from django.contrib.auth.views import password_change_done
defaults = {
'current_app': self.name,
'extra_context': extra_context or {},
}
if self.password_change_done_template is not None:
defaults['template_name'] = self.password_change_done_template
return password_change_done(request, **defaults)
def i18n_javascript(self, request):
"""
Displays the i18n JavaScript that the Django admin requires.
This takes into account the USE_I18N setting. If it's set to False, the
generated JavaScript will be leaner and faster.
"""
if settings.USE_I18N:
from django.views.i18n import javascript_catalog
else:
from django.views.i18n import null_javascript_catalog as javascript_catalog
return javascript_catalog(request, packages=['django.conf', 'django.contrib.admin'])
@never_cache
def logout(self, request, extra_context=None):
"""
Logs out the user for the given HttpRequest.
This should *not* assume the user is already logged in.
"""
from django.contrib.auth.views import logout
defaults = {
'current_app': self.name,
'extra_context': extra_context or {},
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
return logout(request, **defaults)
@never_cache
def login(self, request, extra_context=None):
"""
Displays the login form for the given HttpRequest.
"""
from django.contrib.auth.views import login
context = {
'title': _('Log in'),
'app_path': request.get_full_path(),
REDIRECT_FIELD_NAME: request.get_full_path(),
}
context.update(extra_context or {})
defaults = {
'extra_context': context,
'current_app': self.name,
'authentication_form': self.login_form or AdminAuthenticationForm,
'template_name': self.login_template or 'admin/login.html',
}
return login(request, **defaults)
@never_cache
def index(self, request, extra_context=None):
"""
Displays the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_dict = {}
user = request.user
for model, model_admin in self._registry.items():
app_label = model._meta.app_label
has_module_perms = user.has_module_perms(app_label)
if has_module_perms:
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True in perms.values():
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change', False):
try:
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
except NoReverseMatch:
pass
if perms.get('add', False):
try:
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
except NoReverseMatch:
pass
if app_label in app_dict:
app_dict[app_label]['models'].append(model_dict)
else:
app_dict[app_label] = {
'name': app_label.title(),
'app_label': app_label,
'app_url': reverse('admin:app_list', kwargs={'app_label': app_label}, current_app=self.name),
'has_module_perms': has_module_perms,
'models': [model_dict],
}
# Sort the apps alphabetically.
app_list = list(six.itervalues(app_dict))
app_list.sort(key=lambda x: x['name'])
# Sort the models alphabetically within each app.
for app in app_list:
app['models'].sort(key=lambda x: x['name'])
context = {
'title': _('Site administration'),
'app_list': app_list,
}
context.update(extra_context or {})
return TemplateResponse(request,self.index_template or
'admin/index.html', context,
current_app=self.name)
def app_index(self, request, app_label, extra_context=None):
user = request.user
has_module_perms = user.has_module_perms(app_label)
app_dict = {}
for model, model_admin in self._registry.items():
if app_label == model._meta.app_label:
if has_module_perms:
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True in perms.values():
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change', False):
try:
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
except NoReverseMatch:
pass
if perms.get('add', False):
try:
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
except NoReverseMatch:
pass
if app_dict:
                            app_dict['models'].append(model_dict)
else:
# First time around, now that we know there's
# something to display, add in the necessary meta
# information.
app_dict = {
'name': app_label.title(),
'app_label': app_label,
'app_url': '',
'has_module_perms': has_module_perms,
'models': [model_dict],
}
if not app_dict:
raise Http404('The requested admin page does not exist.')
# Sort the models alphabetically within each app.
app_dict['models'].sort(key=lambda x: x['name'])
context = {
'title': _('%s administration') % capfirst(app_label),
'app_list': [app_dict],
}
context.update(extra_context or {})
return TemplateResponse(request, self.app_index_template or [
'admin/%s/app_index.html' % app_label,
'admin/app_index.html'
], context, current_app=self.name)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
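# Usage sketch (hypothetical app and model), mirroring the register() API
# documented above:
#
#     from django.contrib import admin
#     from myapp.models import Book
#
#     class BookAdmin(admin.ModelAdmin):
#         list_display = ('title', 'author')
#
#     admin.site.register(Book, BookAdmin)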
|
edisonlz/fruit
|
web_project/base/site-packages/django/contrib/admin/sites.py
|
Python
|
apache-2.0
| 18,705
|
# Copyright 2016 Rackspace Australia
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import fixtures
import jsonschema
import os
import requests
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import fixtures as func_fixtures
from nova.tests.functional import integrated_helpers
from nova.tests.unit.image import fake as fake_image
class fake_result(object):
def __init__(self, result):
self.status_code = 200
self.text = jsonutils.dumps(result)
real_request = requests.request
def fake_request(obj, url, method, **kwargs):
if url.startswith('http://127.0.0.1:123'):
return fake_result({'a': 1, 'b': 'foo'})
if url.startswith('http://127.0.0.1:124'):
return fake_result({'c': 3})
if url.startswith('http://127.0.0.1:125'):
return fake_result(jsonutils.loads(kwargs.get('data', '{}')))
return real_request(method, url, **kwargs)
class MetadataTest(test.TestCase, integrated_helpers.InstanceHelperMixin):
def setUp(self):
super(MetadataTest, self).setUp()
fake_image.stub_out_image_service(self)
self.addCleanup(fake_image.FakeImageService_reset)
self.useFixture(nova_fixtures.NeutronFixture(self))
self.useFixture(func_fixtures.PlacementFixture())
self.start_service('conductor')
self.start_service('scheduler')
self.api = self.useFixture(
nova_fixtures.OSAPIFixture(api_version='v2.1')).api
self.start_service('compute')
# create a server for the tests
server = self._build_server(name='test')
server = self.api.post_server({'server': server})
self.server = self._wait_for_state_change(server, 'ACTIVE')
self.api_fixture = self.useFixture(nova_fixtures.OSMetadataServer())
self.md_url = self.api_fixture.md_url
# make sure that the metadata service returns information about the
# server we created above
def fake_get_fixed_ip_by_address(self, ctxt, address):
return {'instance_uuid': server['id']}
self.useFixture(
fixtures.MonkeyPatch(
'nova.network.neutron.API.get_fixed_ip_by_address',
fake_get_fixed_ip_by_address))
def test_lookup_metadata_root_url(self):
res = requests.request('GET', self.md_url, timeout=5)
self.assertEqual(200, res.status_code)
def test_lookup_metadata_openstack_url(self):
url = '%sopenstack' % self.md_url
res = requests.request('GET', url, timeout=5,
headers={'X-Forwarded-For': '127.0.0.2'})
self.assertEqual(200, res.status_code)
def test_lookup_metadata_data_url(self):
url = '%sopenstack/latest/meta_data.json' % self.md_url
res = requests.request('GET', url, timeout=5)
self.assertEqual(200, res.status_code)
j = jsonutils.loads(res.text)
self.assertIn('hostname', j)
self.assertEqual('test.novalocal', j['hostname'])
def test_lookup_external_service(self):
self.flags(
vendordata_providers=['StaticJSON', 'DynamicJSON'],
vendordata_dynamic_targets=[
'testing@http://127.0.0.1:123',
'hamster@http://127.0.0.1:123'
],
group='api'
)
self.useFixture(fixtures.MonkeyPatch(
'keystoneauth1.session.Session.request', fake_request))
url = '%sopenstack/2016-10-06/vendor_data2.json' % self.md_url
res = requests.request('GET', url, timeout=5)
self.assertEqual(200, res.status_code)
j = jsonutils.loads(res.text)
self.assertEqual({}, j['static'])
self.assertEqual(1, j['testing']['a'])
self.assertEqual('foo', j['testing']['b'])
self.assertEqual(1, j['hamster']['a'])
self.assertEqual('foo', j['hamster']['b'])
def test_lookup_external_service_no_overwrite(self):
self.flags(
vendordata_providers=['DynamicJSON'],
vendordata_dynamic_targets=[
'testing@http://127.0.0.1:123',
'testing@http://127.0.0.1:124'
],
group='api'
)
self.useFixture(fixtures.MonkeyPatch(
'keystoneauth1.session.Session.request', fake_request))
url = '%sopenstack/2016-10-06/vendor_data2.json' % self.md_url
res = requests.request('GET', url, timeout=5)
self.assertEqual(200, res.status_code)
j = jsonutils.loads(res.text)
self.assertNotIn('static', j)
self.assertEqual(1, j['testing']['a'])
self.assertEqual('foo', j['testing']['b'])
self.assertNotIn('c', j['testing'])
def test_lookup_external_service_passes_data(self):
# Much of the data we pass to the REST service is missing because of
# the way we've created the fake instance, but we should at least try
# and ensure we're passing _some_ data through to the external REST
# service.
self.flags(
vendordata_providers=['DynamicJSON'],
vendordata_dynamic_targets=[
'testing@http://127.0.0.1:125'
],
group='api'
)
self.useFixture(fixtures.MonkeyPatch(
'keystoneauth1.session.Session.request', fake_request))
url = '%sopenstack/2016-10-06/vendor_data2.json' % self.md_url
res = requests.request('GET', url, timeout=5)
self.assertEqual(200, res.status_code)
j = jsonutils.loads(res.text)
self.assertIn('instance-id', j['testing'])
self.assertTrue(uuidutils.is_uuid_like(j['testing']['instance-id']))
self.assertIn('hostname', j['testing'])
self.assertEqual(self.server['tenant_id'], j['testing']['project-id'])
self.assertIn('metadata', j['testing'])
self.assertIn('image-id', j['testing'])
self.assertIn('user-data', j['testing'])
def test_network_data_matches_schema(self):
self.useFixture(fixtures.MonkeyPatch(
'keystoneauth1.session.Session.request', fake_request))
url = '%sopenstack/latest/network_data.json' % self.md_url
res = requests.request('GET', url, timeout=5)
self.assertEqual(200, res.status_code)
# load the jsonschema for network_data
schema_file = os.path.normpath(os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"../../../doc/api_schemas/network_data.json"))
with open(schema_file, 'rb') as f:
schema = jsonutils.load(f)
jsonschema.validate(res.json(), schema)
|
rahulunair/nova
|
nova/tests/functional/test_metadata.py
|
Python
|
apache-2.0
| 7,360
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import os
import re
import time
from datetime import datetime
from django.forms.formsets import formset_factory
from django.http import HttpResponse
from django.utils.functional import wraps
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from desktop.conf import TIME_ZONE
from desktop.lib.django_util import JsonResponse, render
from desktop.lib.json_utils import JSONEncoderForHTML
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import smart_str, smart_unicode
from desktop.lib.rest.http_client import RestException
from desktop.lib.view_util import format_duration_in_millis
from desktop.log.access import access_warn
from desktop.models import Document, Document2
from hadoop.fs.hadoopfs import Hdfs
from liboozie.oozie_api import get_oozie
from liboozie.credentials import Credentials
from liboozie.submission2 import Submission
from oozie.conf import OOZIE_JOBS_COUNT, ENABLE_CRON_SCHEDULING, ENABLE_V2
from oozie.forms import RerunForm, ParameterForm, RerunCoordForm, RerunBundleForm, UpdateCoordinatorForm
from oozie.models import Workflow as OldWorkflow, Job, utc_datetime_format, Bundle, Coordinator, get_link, History as OldHistory
from oozie.models2 import History, Workflow, WORKFLOW_NODE_PROPERTIES
from oozie.settings import DJANGO_APPS
from oozie.utils import convert_to_server_timezone
def get_history():
if ENABLE_V2.get():
return History
else:
return OldHistory
def get_workflow():
if ENABLE_V2.get():
return Workflow
else:
return OldWorkflow
LOG = logging.getLogger(__name__)
"""
Permissions:
A Workflow/Coordinator/Bundle can:
* be accessed only by its owner or a superuser or by a user with 'dashboard_jobs_access' permissions
* be submitted/modified only by its owner or a superuser
Permissions checking happens by calling:
* check_job_access_permission()
* check_job_edition_permission()
"""
def _get_workflows(user):
return [{
'name': workflow.name,
'owner': workflow.owner.username,
'value': workflow.uuid,
'id': workflow.id
} for workflow in [d.content_object for d in Document.objects.get_docs(user, Document2, extra='workflow2')]
]
def manage_oozie_jobs(request, job_id, action):
if request.method != 'POST':
raise PopupException(_('Use a POST request to manage an Oozie job.'))
job = check_job_access_permission(request, job_id)
check_job_edition_permission(job, request.user)
response = {'status': -1, 'data': ''}
try:
oozie_api = get_oozie(request.user)
params = None
if action == 'change':
pause_time_val = request.POST.get('pause_time')
if request.POST.get('clear_pause_time') == 'true':
pause_time_val = ''
end_time_val = request.POST.get('end_time')
if end_time_val:
end_time_val = convert_to_server_timezone(end_time_val, TIME_ZONE.get())
if pause_time_val:
pause_time_val = convert_to_server_timezone(pause_time_val, TIME_ZONE.get())
params = {'value': 'endtime=%s' % (end_time_val) + ';'
'pausetime=%s' % (pause_time_val) + ';'
'concurrency=%s' % (request.POST.get('concurrency'))}
elif action == 'ignore':
oozie_api = get_oozie(request.user, api_version="v2")
params = {
'type': 'action',
'scope': ','.join(job.aggreate(request.POST.get('actions').split())),
}
response['data'] = oozie_api.job_control(job_id, action, parameters=params)
response['status'] = 0
if 'notification' in request.POST:
request.info(_(request.POST.get('notification')))
except RestException, ex:
ex_message = ex.message
if ex._headers.get('oozie-error-message'):
ex_message = ex._headers.get('oozie-error-message')
msg = "Error performing %s on Oozie job %s: %s." % (action, job_id, ex_message)
LOG.exception(msg)
response['data'] = _(msg)
return JsonResponse(response)
def bulk_manage_oozie_jobs(request):
if request.method != 'POST':
raise PopupException(_('Use a POST request to manage the Oozie jobs.'))
response = {'status': -1, 'data': ''}
if 'job_ids' in request.POST and 'action' in request.POST:
jobs = request.POST.get('job_ids').split()
response = {'totalRequests': len(jobs), 'totalErrors': 0, 'messages': ''}
oozie_api = get_oozie(request.user)
for job_id in jobs:
job = check_job_access_permission(request, job_id)
check_job_edition_permission(job, request.user)
try:
oozie_api.job_control(job_id, request.POST.get('action'))
except RestException, ex:
LOG.exception("Error performing bulk operation for job_id=%s", job_id)
response['totalErrors'] = response['totalErrors'] + 1
response['messages'] += str(ex)
return JsonResponse(response)
def show_oozie_error(view_func):
def decorate(request, *args, **kwargs):
try:
return view_func(request, *args, **kwargs)
except RestException, ex:
LOG.exception("Error communicating with Oozie in %s", view_func.__name__)
detail = ex._headers.get('oozie-error-message', ex)
if 'Max retries exceeded with url' in str(detail) or 'Connection refused' in str(detail):
detail = _('The Oozie server is not running')
raise PopupException(_('An error occurred with Oozie.'), detail=detail)
return wraps(view_func)(decorate)
@show_oozie_error
def list_oozie_workflows(request):
kwargs = {'cnt': OOZIE_JOBS_COUNT.get(), 'filters': []}
if not has_dashboard_jobs_access(request.user):
kwargs['filters'].append(('user', request.user.username))
oozie_api = get_oozie(request.user)
if request.GET.get('format') == 'json':
just_sla = request.GET.get('justsla') == 'true'
if request.GET.get('startcreatedtime'):
kwargs['filters'].extend([('startcreatedtime', request.GET.get('startcreatedtime'))])
if request.GET.get('offset'):
kwargs['offset'] = request.GET.get('offset')
json_jobs = []
total_jobs = 0
if request.GET.getlist('status'):
kwargs['filters'].extend([('status', status) for status in request.GET.getlist('status')])
wf_list = oozie_api.get_workflows(**kwargs)
json_jobs = wf_list.jobs
total_jobs = wf_list.total
if request.GET.get('type') == 'progress':
json_jobs = [oozie_api.get_job(job.id) for job in json_jobs]
response = massaged_oozie_jobs_for_json(json_jobs, request.user, just_sla)
response['total_jobs'] = total_jobs
return JsonResponse(response, encoder=JSONEncoderForHTML)
return render('dashboard/list_oozie_workflows.mako', request, {
'user': request.user,
'jobs': [],
'has_job_edition_permission': has_job_edition_permission,
})
@show_oozie_error
def list_oozie_coordinators(request):
kwargs = {'cnt': OOZIE_JOBS_COUNT.get(), 'filters': []}
if not has_dashboard_jobs_access(request.user):
kwargs['filters'].append(('user', request.user.username))
oozie_api = get_oozie(request.user)
enable_cron_scheduling = ENABLE_CRON_SCHEDULING.get()
if request.GET.get('format') == 'json':
if request.GET.get('offset'):
kwargs['offset'] = request.GET.get('offset')
json_jobs = []
total_jobs = 0
if request.GET.getlist('status'):
kwargs['filters'].extend([('status', status) for status in request.GET.getlist('status')])
co_list = oozie_api.get_coordinators(**kwargs)
json_jobs = co_list.jobs
total_jobs = co_list.total
if request.GET.get('type') == 'progress':
json_jobs = [oozie_api.get_coordinator(job.id) for job in json_jobs]
response = massaged_oozie_jobs_for_json(json_jobs, request.user)
response['total_jobs'] = total_jobs
return JsonResponse(response, encoder=JSONEncoderForHTML)
return render('dashboard/list_oozie_coordinators.mako', request, {
'jobs': [],
'has_job_edition_permission': has_job_edition_permission,
'enable_cron_scheduling': enable_cron_scheduling,
})
@show_oozie_error
def list_oozie_bundles(request):
kwargs = {'cnt': OOZIE_JOBS_COUNT.get(), 'filters': []}
if not has_dashboard_jobs_access(request.user):
kwargs['filters'].append(('user', request.user.username))
oozie_api = get_oozie(request.user)
if request.GET.get('format') == 'json':
if request.GET.get('offset'):
kwargs['offset'] = request.GET.get('offset')
json_jobs = []
total_jobs = 0
if request.GET.getlist('status'):
kwargs['filters'].extend([('status', status) for status in request.GET.getlist('status')])
bundle_list = oozie_api.get_bundles(**kwargs)
json_jobs = bundle_list.jobs
total_jobs = bundle_list.total
if request.GET.get('type') == 'progress':
json_jobs = [oozie_api.get_coordinator(job.id) for job in json_jobs]
response = massaged_oozie_jobs_for_json(json_jobs, request.user)
response['total_jobs'] = total_jobs
return JsonResponse(response, encoder=JSONEncoderForHTML)
return render('dashboard/list_oozie_bundles.mako', request, {
'jobs': [],
'has_job_edition_permission': has_job_edition_permission,
})
@show_oozie_error
def list_oozie_workflow(request, job_id):
oozie_workflow = check_job_access_permission(request, job_id)
oozie_coordinator = None
if request.GET.get('coordinator_job_id'):
oozie_coordinator = check_job_access_permission(request, request.GET.get('coordinator_job_id'))
oozie_bundle = None
if request.GET.get('bundle_job_id'):
oozie_bundle = check_job_access_permission(request, request.GET.get('bundle_job_id'))
if oozie_coordinator is not None:
setattr(oozie_workflow, 'oozie_coordinator', oozie_coordinator)
if oozie_bundle is not None:
setattr(oozie_workflow, 'oozie_bundle', oozie_bundle)
oozie_parent = oozie_workflow.get_parent_job_id()
if oozie_parent:
oozie_parent = check_job_access_permission(request, oozie_parent)
workflow_data = None
credentials = None
doc = None
hue_workflow = None
workflow_graph = 'MISSING' # default to prevent loading the graph tab for deleted workflows
full_node_list = None
if ENABLE_V2.get():
try:
# To update with the new History document model
hue_coord = get_history().get_coordinator_from_config(oozie_workflow.conf_dict)
hue_workflow = (hue_coord and hue_coord.workflow) or get_history().get_workflow_from_config(oozie_workflow.conf_dict)
if hue_coord and hue_coord.workflow: hue_coord.workflow.document.doc.get().can_read_or_exception(request.user)
if hue_workflow: hue_workflow.document.doc.get().can_read_or_exception(request.user)
if hue_workflow:
full_node_list = hue_workflow.nodes
workflow_id = hue_workflow.id
wid = {
'id': workflow_id
}
doc = Document2.objects.get(type='oozie-workflow2', **wid)
new_workflow = get_workflow()(document=doc)
workflow_data = new_workflow.get_data()
else:
try:
workflow_data = Workflow.gen_workflow_data_from_xml(request.user, oozie_workflow)
except Exception, e:
LOG.exception('Graph data could not be generated from Workflow %s: %s' % (oozie_workflow.id, e))
workflow_graph = ''
credentials = Credentials()
except:
LOG.exception("Error generating full page for running workflow %s" % job_id)
else:
history = get_history().cross_reference_submission_history(request.user, job_id)
hue_coord = history and history.get_coordinator() or get_history().get_coordinator_from_config(oozie_workflow.conf_dict)
hue_workflow = (hue_coord and hue_coord.workflow) or (history and history.get_workflow()) or get_history().get_workflow_from_config(oozie_workflow.conf_dict)
if hue_coord and hue_coord.workflow: Job.objects.can_read_or_exception(request, hue_coord.workflow.id)
if hue_workflow: Job.objects.can_read_or_exception(request, hue_workflow.id)
if hue_workflow:
workflow_graph = hue_workflow.gen_status_graph(oozie_workflow)
full_node_list = hue_workflow.node_list
else:
workflow_graph, full_node_list = get_workflow().gen_status_graph_from_xml(request.user, oozie_workflow)
parameters = oozie_workflow.conf_dict.copy()
for action in oozie_workflow.actions:
action.oozie_coordinator = oozie_coordinator
action.oozie_bundle = oozie_bundle
if request.GET.get('format') == 'json':
return_obj = {
'id': oozie_workflow.id,
'status': oozie_workflow.status,
'progress': oozie_workflow.get_progress(full_node_list),
'graph': workflow_graph,
'actions': massaged_workflow_actions_for_json(oozie_workflow.get_working_actions(), oozie_coordinator, oozie_bundle)
}
return JsonResponse(return_obj, encoder=JSONEncoderForHTML)
oozie_slas = []
if oozie_workflow.has_sla:
oozie_api = get_oozie(request.user, api_version="v2")
params = {
'id': oozie_workflow.id,
'parent_id': oozie_workflow.id
}
oozie_slas = oozie_api.get_oozie_slas(**params)
return render('dashboard/list_oozie_workflow.mako', request, {
'oozie_workflow': oozie_workflow,
'oozie_coordinator': oozie_coordinator,
'oozie_bundle': oozie_bundle,
'oozie_parent': oozie_parent,
'oozie_slas': oozie_slas,
'hue_workflow': hue_workflow,
'hue_coord': hue_coord,
'parameters': parameters,
'has_job_edition_permission': has_job_edition_permission,
'workflow_graph': workflow_graph,
'layout_json': json.dumps(workflow_data['layout'], cls=JSONEncoderForHTML) if workflow_data else '',
'workflow_json': json.dumps(workflow_data['workflow'], cls=JSONEncoderForHTML) if workflow_data else '',
'credentials_json': json.dumps(credentials.credentials.keys(), cls=JSONEncoderForHTML) if credentials else '',
'workflow_properties_json': json.dumps(WORKFLOW_NODE_PROPERTIES, cls=JSONEncoderForHTML),
'doc1_id': doc.doc.get().id if doc else -1,
'subworkflows_json': json.dumps(_get_workflows(request.user), cls=JSONEncoderForHTML),
'can_edit_json': json.dumps(doc is None or doc.doc.get().is_editable(request.user))
})
@show_oozie_error
def list_oozie_coordinator(request, job_id):
kwargs = {'cnt': 50, 'filters': []}
kwargs['offset'] = request.GET.get('offset', 1)
if request.GET.getlist('status'):
kwargs['filters'].extend([('status', status) for status in request.GET.getlist('status')])
oozie_coordinator = check_job_access_permission(request, job_id, **kwargs)
# Cross reference the submission history (if any)
coordinator = get_history().get_coordinator_from_config(oozie_coordinator.conf_dict)
try:
if not ENABLE_V2.get():
coordinator = get_history().objects.get(oozie_job_id=job_id).job.get_full_node()
except:
LOG.exception("Ignoring error getting oozie job coordinator for job_id=%s", job_id)
oozie_bundle = None
if request.GET.get('bundle_job_id'):
try:
oozie_bundle = check_job_access_permission(request, request.GET.get('bundle_job_id'))
except:
LOG.exception("Ignoring error getting oozie bundle for job_id=%s", job_id)
if request.GET.get('format') == 'json':
actions = massaged_coordinator_actions_for_json(oozie_coordinator, oozie_bundle)
return_obj = {
'id': oozie_coordinator.id,
'status': oozie_coordinator.status,
'progress': oozie_coordinator.get_progress(),
'nextTime': format_time(oozie_coordinator.nextMaterializedTime),
'endTime': format_time(oozie_coordinator.endTime),
'actions': actions,
'total_actions': oozie_coordinator.total
}
return JsonResponse(return_obj, encoder=JSONEncoderForHTML)
oozie_slas = []
if oozie_coordinator.has_sla:
oozie_api = get_oozie(request.user, api_version="v2")
params = {
'id': oozie_coordinator.id,
'parent_id': oozie_coordinator.id
}
oozie_slas = oozie_api.get_oozie_slas(**params)
enable_cron_scheduling = ENABLE_CRON_SCHEDULING.get()
update_coord_form = UpdateCoordinatorForm(oozie_coordinator=oozie_coordinator)
return render('dashboard/list_oozie_coordinator.mako', request, {
'oozie_coordinator': oozie_coordinator,
'oozie_slas': oozie_slas,
'coordinator': coordinator,
'oozie_bundle': oozie_bundle,
'has_job_edition_permission': has_job_edition_permission,
'enable_cron_scheduling': enable_cron_scheduling,
'update_coord_form': update_coord_form,
})
@show_oozie_error
def list_oozie_bundle(request, job_id):
oozie_bundle = check_job_access_permission(request, job_id)
# Cross reference the submission history (if any)
bundle = None
try:
if ENABLE_V2.get():
bundle = get_history().get_bundle_from_config(oozie_bundle.conf_dict)
else:
bundle = get_history().objects.get(oozie_job_id=job_id).job.get_full_node()
except:
LOG.exception("Ignoring error getting oozie job bundle for job_id=%s", job_id)
if request.GET.get('format') == 'json':
return_obj = {
'id': oozie_bundle.id,
'status': oozie_bundle.status,
'progress': oozie_bundle.get_progress(),
'endTime': format_time(oozie_bundle.endTime),
'actions': massaged_bundle_actions_for_json(oozie_bundle)
}
return HttpResponse(json.dumps(return_obj).replace('\\\\', '\\'), content_type="application/json")
return render('dashboard/list_oozie_bundle.mako', request, {
'oozie_bundle': oozie_bundle,
'bundle': bundle,
'has_job_edition_permission': has_job_edition_permission,
})
@show_oozie_error
def list_oozie_workflow_action(request, action):
try:
action = get_oozie(request.user).get_action(action)
workflow = check_job_access_permission(request, action.id.split('@')[0])
except RestException, ex:
msg = _("Error accessing Oozie action %s.") % (action,)
LOG.exception(msg)
raise PopupException(msg, detail=ex.message)
oozie_coordinator = None
if request.GET.get('coordinator_job_id'):
oozie_coordinator = check_job_access_permission(request, request.GET.get('coordinator_job_id'))
oozie_bundle = None
if request.GET.get('bundle_job_id'):
oozie_bundle = check_job_access_permission(request, request.GET.get('bundle_job_id'))
workflow.oozie_coordinator = oozie_coordinator
workflow.oozie_bundle = oozie_bundle
oozie_parent = workflow.get_parent_job_id()
if oozie_parent:
oozie_parent = check_job_access_permission(request, oozie_parent)
return render('dashboard/list_oozie_workflow_action.mako', request, {
'action': action,
'workflow': workflow,
'oozie_coordinator': oozie_coordinator,
'oozie_bundle': oozie_bundle,
'oozie_parent': oozie_parent,
})
@show_oozie_error
def get_oozie_job_log(request, job_id):
oozie_api = get_oozie(request.user, api_version="v2")
check_job_access_permission(request, job_id)
kwargs = {'logfilter' : []}
if request.GET.get('format') == 'json':
if request.GET.get('recent'):
kwargs['logfilter'].extend([('recent', val) for val in request.GET.get('recent').split(':')])
if request.GET.get('limit'):
kwargs['logfilter'].extend([('limit', request.GET.get('limit'))])
if request.GET.get('loglevel'):
kwargs['logfilter'].extend([('loglevel', request.GET.get('loglevel'))])
if request.GET.get('text'):
kwargs['logfilter'].extend([('text', request.GET.get('text'))])
status_resp = oozie_api.get_job_status(job_id)
log = oozie_api.get_job_log(job_id, **kwargs)
return_obj = {
'id': job_id,
'status': status_resp['status'],
'log': log,
}
return JsonResponse(return_obj, encoder=JSONEncoderForHTML)
@show_oozie_error
def list_oozie_info(request):
api = get_oozie(request.user)
configuration = api.get_configuration()
oozie_status = api.get_oozie_status()
instrumentation = {}
metrics = {}
if 'org.apache.oozie.service.MetricsInstrumentationService' in [c.strip() for c in configuration.get('oozie.services.ext', '').split(',')]:
api2 = get_oozie(request.user, api_version="v2")
metrics = api2.get_metrics()
else:
instrumentation = api.get_instrumentation()
return render('dashboard/list_oozie_info.mako', request, {
'instrumentation': instrumentation,
'metrics': metrics,
'configuration': configuration,
'oozie_status': oozie_status,
})
@show_oozie_error
def list_oozie_sla(request):
oozie_api = get_oozie(request.user, api_version="v2")
if request.method == 'POST':
params = {}
job_name = request.POST.get('job_name')
if re.match('.*-oozie-oozi-[WCB]', job_name):
params['id'] = job_name
params['parent_id'] = job_name
else:
params['app_name'] = job_name
if 'useDates' in request.POST:
if request.POST.get('start'):
params['nominal_start'] = request.POST.get('start')
if request.POST.get('end'):
params['nominal_end'] = request.POST.get('end')
oozie_slas = oozie_api.get_oozie_slas(**params)
else:
oozie_slas = [] # or get latest?
if request.REQUEST.get('format') == 'json':
massaged_slas = []
for sla in oozie_slas:
massaged_slas.append(massaged_sla_for_json(sla, request))
return HttpResponse(json.dumps({'oozie_slas': massaged_slas}), content_type="text/json")
configuration = oozie_api.get_configuration()
show_slas_hint = 'org.apache.oozie.sla.service.SLAService' not in configuration.get('oozie.services.ext', '')
return render('dashboard/list_oozie_sla.mako', request, {
'oozie_slas': oozie_slas,
'show_slas_hint': show_slas_hint
})
def massaged_sla_for_json(sla, request):
massaged_sla = {
'slaStatus': sla['slaStatus'],
'id': sla['id'],
'appType': sla['appType'],
'appName': sla['appName'],
'appUrl': get_link(sla['id']),
'user': sla['user'],
'nominalTime': sla['nominalTime'],
'expectedStart': sla['expectedStart'],
'actualStart': sla['actualStart'],
'expectedEnd': sla['expectedEnd'],
'actualEnd': sla['actualEnd'],
'jobStatus': sla['jobStatus'],
'expectedDuration': sla['expectedDuration'],
'actualDuration': sla['actualDuration'],
'lastModified': sla['lastModified']
}
return massaged_sla
@show_oozie_error
def sync_coord_workflow(request, job_id):
ParametersFormSet = formset_factory(ParameterForm, extra=0)
job = check_job_access_permission(request, job_id)
check_job_edition_permission(job, request.user)
hue_coord = get_history().get_coordinator_from_config(job.conf_dict)
hue_wf = (hue_coord and hue_coord.workflow) or get_history().get_workflow_from_config(job.conf_dict)
wf_application_path = job.conf_dict.get('wf_application_path') and Hdfs.urlsplit(job.conf_dict['wf_application_path'])[2] or ''
coord_application_path = job.conf_dict.get('oozie.coord.application.path') and Hdfs.urlsplit(job.conf_dict['oozie.coord.application.path'])[2] or ''
properties = hue_coord and hue_coord.properties and dict([(param['name'], param['value']) for param in hue_coord.properties]) or None
if request.method == 'POST':
params_form = ParametersFormSet(request.POST)
if params_form.is_valid():
mapping = dict([(param['name'], param['value']) for param in params_form.cleaned_data])
# Update workflow params in coordinator
hue_coord.clear_workflow_params()
properties = dict([(param['name'], param['value']) for param in hue_coord.properties])
# Deploy WF XML
submission = Submission(user=request.user, job=hue_wf, fs=request.fs, jt=request.jt, properties=properties)
submission._create_file(wf_application_path, hue_wf.XML_FILE_NAME, hue_wf.to_xml(mapping=properties), do_as=True)
# Deploy Coordinator XML
job.conf_dict.update(mapping)
submission = Submission(user=request.user, job=hue_coord, fs=request.fs, jt=request.jt, properties=job.conf_dict, oozie_id=job.id)
submission._create_file(coord_application_path, hue_coord.XML_FILE_NAME, hue_coord.to_xml(mapping=job.conf_dict), do_as=True)
# Server picks up deployed Coordinator XML changes after running 'update' action
submission.update_coord()
request.info(_('Successfully updated Workflow definition'))
return redirect(reverse('oozie:list_oozie_coordinator', kwargs={'job_id': job_id}))
else:
      request.error(_('Invalid submission form: %s') % params_form.errors)
else:
new_params = hue_wf and hue_wf.find_all_parameters() or []
new_params = dict([(param['name'], param['value']) for param in new_params])
# Set previous values
if properties:
new_params = dict([(key, properties[key]) if key in properties.keys() else (key, new_params[key]) for key, value in new_params.iteritems()])
initial_params = ParameterForm.get_initial_params(new_params)
params_form = ParametersFormSet(initial=initial_params)
popup = render('editor2/submit_job_popup.mako', request, {
'params_form': params_form,
'name': _('Job'),
'header': _('Sync Workflow definition?'),
'action': reverse('oozie:sync_coord_workflow', kwargs={'job_id': job_id})
}, force_template=True).content
return JsonResponse(popup, safe=False)
@show_oozie_error
def rerun_oozie_job(request, job_id, app_path):
ParametersFormSet = formset_factory(ParameterForm, extra=0)
oozie_workflow = check_job_access_permission(request, job_id)
check_job_edition_permission(oozie_workflow, request.user)
if request.method == 'POST':
rerun_form = RerunForm(request.POST, oozie_workflow=oozie_workflow)
params_form = ParametersFormSet(request.POST)
if sum([rerun_form.is_valid(), params_form.is_valid()]) == 2:
args = {}
if request.POST['rerun_form_choice'] == 'fail_nodes':
args['fail_nodes'] = 'true'
else:
args['skip_nodes'] = ','.join(rerun_form.cleaned_data['skip_nodes'])
args['deployment_dir'] = app_path
mapping = dict([(param['name'], param['value']) for param in params_form.cleaned_data])
_rerun_workflow(request, job_id, args, mapping)
request.info(_('Workflow re-running.'))
return redirect(reverse('oozie:list_oozie_workflow', kwargs={'job_id': job_id}))
else:
      request.error(_('Invalid submission form: %s %s') % (rerun_form.errors, params_form.errors))
else:
rerun_form = RerunForm(oozie_workflow=oozie_workflow)
initial_params = ParameterForm.get_initial_params(oozie_workflow.conf_dict)
params_form = ParametersFormSet(initial=initial_params)
popup = render('dashboard/rerun_job_popup.mako', request, {
'rerun_form': rerun_form,
'params_form': params_form,
'action': reverse('oozie:rerun_oozie_job', kwargs={'job_id': job_id, 'app_path': app_path}),
}, force_template=True).content
return JsonResponse(popup, safe=False)
def _rerun_workflow(request, oozie_id, run_args, mapping):
try:
submission = Submission(user=request.user, fs=request.fs, jt=request.jt, properties=mapping, oozie_id=oozie_id)
job_id = submission.rerun(**run_args)
return job_id
except RestException, ex:
msg = _("Error re-running workflow %s.") % (oozie_id,)
LOG.exception(msg)
raise PopupException(msg, detail=ex._headers.get('oozie-error-message', ex))
@show_oozie_error
def rerun_oozie_coordinator(request, job_id, app_path):
oozie_coordinator = check_job_access_permission(request, job_id)
check_job_edition_permission(oozie_coordinator, request.user)
ParametersFormSet = formset_factory(ParameterForm, extra=0)
if request.method == 'POST':
params_form = ParametersFormSet(request.POST)
rerun_form = RerunCoordForm(request.POST, oozie_coordinator=oozie_coordinator)
if sum([rerun_form.is_valid(), params_form.is_valid()]) == 2:
args = {}
args['deployment_dir'] = app_path
params = {
'type': 'action',
'scope': ','.join(oozie_coordinator.aggreate(rerun_form.cleaned_data['actions'])),
'refresh': rerun_form.cleaned_data['refresh'],
'nocleanup': rerun_form.cleaned_data['nocleanup'],
}
properties = dict([(param['name'], param['value']) for param in params_form.cleaned_data])
_rerun_coordinator(request, job_id, args, params, properties)
request.info(_('Coordinator re-running.'))
return redirect(reverse('oozie:list_oozie_coordinator', kwargs={'job_id': job_id}))
else:
request.error(_('Invalid submission form: %s') % smart_unicode(rerun_form.errors))
return list_oozie_coordinator(request, job_id)
else:
rerun_form = RerunCoordForm(oozie_coordinator=oozie_coordinator)
initial_params = ParameterForm.get_initial_params(oozie_coordinator.conf_dict)
params_form = ParametersFormSet(initial=initial_params)
popup = render('dashboard/rerun_coord_popup.mako', request, {
'rerun_form': rerun_form,
'params_form': params_form,
'action': reverse('oozie:rerun_oozie_coord', kwargs={'job_id': job_id, 'app_path': app_path}),
}, force_template=True).content
return JsonResponse(popup, safe=False)
def _rerun_coordinator(request, oozie_id, args, params, properties):
try:
submission = Submission(user=request.user, fs=request.fs, jt=request.jt, oozie_id=oozie_id, properties=properties)
job_id = submission.rerun_coord(params=params, **args)
return job_id
except RestException, ex:
msg = _("Error re-running coordinator %s.") % (oozie_id,)
LOG.exception(msg)
raise PopupException(msg, detail=ex._headers.get('oozie-error-message', ex))
@show_oozie_error
def rerun_oozie_bundle(request, job_id, app_path):
oozie_bundle = check_job_access_permission(request, job_id)
check_job_edition_permission(oozie_bundle, request.user)
ParametersFormSet = formset_factory(ParameterForm, extra=0)
if request.method == 'POST':
params_form = ParametersFormSet(request.POST)
rerun_form = RerunBundleForm(request.POST, oozie_bundle=oozie_bundle)
if sum([rerun_form.is_valid(), params_form.is_valid()]) == 2:
args = {}
args['deployment_dir'] = app_path
params = {
'coord-scope': ','.join(rerun_form.cleaned_data['coordinators']),
'refresh': rerun_form.cleaned_data['refresh'],
'nocleanup': rerun_form.cleaned_data['nocleanup'],
}
if rerun_form.cleaned_data['start'] and rerun_form.cleaned_data['end']:
date = {
'date-scope':
'%(start)s::%(end)s' % {
'start': utc_datetime_format(rerun_form.cleaned_data['start']),
'end': utc_datetime_format(rerun_form.cleaned_data['end'])
}
}
params.update(date)
properties = dict([(param['name'], param['value']) for param in params_form.cleaned_data])
_rerun_bundle(request, job_id, args, params, properties)
request.info(_('Bundle re-running.'))
return redirect(reverse('oozie:list_oozie_bundle', kwargs={'job_id': job_id}))
else:
      request.error(_('Invalid submission form: %s') % (rerun_form.errors,))
return list_oozie_bundle(request, job_id)
else:
rerun_form = RerunBundleForm(oozie_bundle=oozie_bundle)
initial_params = ParameterForm.get_initial_params(oozie_bundle.conf_dict)
params_form = ParametersFormSet(initial=initial_params)
popup = render('dashboard/rerun_bundle_popup.mako', request, {
'rerun_form': rerun_form,
'params_form': params_form,
'action': reverse('oozie:rerun_oozie_bundle', kwargs={'job_id': job_id, 'app_path': app_path}),
}, force_template=True).content
return JsonResponse(popup, safe=False)
def _rerun_bundle(request, oozie_id, args, params, properties):
try:
submission = Submission(user=request.user, fs=request.fs, jt=request.jt, oozie_id=oozie_id, properties=properties)
job_id = submission.rerun_bundle(params=params, **args)
return job_id
except RestException, ex:
msg = _("Error re-running bundle %s.") % (oozie_id,)
LOG.exception(msg)
raise PopupException(msg, detail=ex._headers.get('oozie-error-message', ex))
def submit_external_job(request, application_path):
ParametersFormSet = formset_factory(ParameterForm, extra=0)
if request.method == 'POST':
params_form = ParametersFormSet(request.POST)
if params_form.is_valid():
mapping = dict([(param['name'], param['value']) for param in params_form.cleaned_data])
mapping['dryrun'] = request.POST.get('dryrun_checkbox') == 'on'
application_name = os.path.basename(application_path)
application_class = Bundle if application_name == 'bundle.xml' else Coordinator if application_name == 'coordinator.xml' else get_workflow()
mapping[application_class.get_application_path_key()] = application_path
try:
submission = Submission(request.user, fs=request.fs, jt=request.jt, properties=mapping)
job_id = submission.run(application_path)
except RestException, ex:
detail = ex._headers.get('oozie-error-message', ex)
if 'Max retries exceeded with url' in str(detail):
detail = '%s: %s' % (_('The Oozie server is not running'), detail)
LOG.exception(smart_str(detail))
raise PopupException(_("Error submitting job %s") % (application_path,), detail=detail)
request.info(_('Oozie job submitted'))
view = 'list_oozie_bundle' if application_name == 'bundle.xml' else 'list_oozie_coordinator' if application_name == 'coordinator.xml' else 'list_oozie_workflow'
return redirect(reverse('oozie:%s' % view, kwargs={'job_id': job_id}))
else:
      request.error(_('Invalid submission form: %s') % params_form.errors)
else:
parameters = Submission(request.user, fs=request.fs, jt=request.jt).get_external_parameters(application_path)
initial_params = ParameterForm.get_initial_params(parameters)
params_form = ParametersFormSet(initial=initial_params)
popup = render('editor/submit_job_popup.mako', request, {
'params_form': params_form,
'name': _('Job'),
'action': reverse('oozie:submit_external_job', kwargs={'application_path': application_path}),
'show_dryrun': os.path.basename(application_path) != 'bundle.xml'
}, force_template=True).content
return JsonResponse(popup, safe=False)
def massaged_workflow_actions_for_json(workflow_actions, oozie_coordinator, oozie_bundle):
actions = []
for action in workflow_actions:
if oozie_coordinator is not None:
setattr(action, 'oozie_coordinator', oozie_coordinator)
if oozie_bundle is not None:
setattr(action, 'oozie_bundle', oozie_bundle)
massaged_action = {
'id': action.id,
'log': action.get_absolute_log_url(),
'url': action.get_absolute_url(),
'name': action.name,
'type': action.type,
'status': action.status,
'externalIdUrl': action.get_external_id_url(),
'externalId': action.externalId,
'startTime': format_time(action.startTime),
'endTime': format_time(action.endTime),
'retries': action.retries,
'errorCode': action.errorCode,
'errorMessage': action.errorMessage,
'transition': action.transition,
'data': action.data,
}
actions.append(massaged_action)
return actions
def massaged_coordinator_actions_for_json(coordinator, oozie_bundle):
coordinator_id = coordinator.id
coordinator_actions = coordinator.get_working_actions()
actions = []
related_job_ids = []
related_job_ids.append('coordinator_job_id=%s' % coordinator_id)
if oozie_bundle is not None:
    related_job_ids.append('bundle_job_id=%s' % oozie_bundle.id)
for action in coordinator_actions:
massaged_action = {
'id': action.id,
'url': action.externalId and reverse('oozie:list_oozie_workflow', kwargs={'job_id': action.externalId}) + '?%s' % '&'.join(related_job_ids) or '',
'number': action.actionNumber,
'type': action.type,
'status': action.status,
'externalId': action.externalId or '-',
'externalIdUrl': action.externalId and reverse('oozie:list_oozie_workflow_action', kwargs={'action': action.externalId}) or '',
'nominalTime': format_time(action.nominalTime),
'title': action.title,
'createdTime': format_time(action.createdTime),
'lastModifiedTime': format_time(action.lastModifiedTime),
'errorCode': action.errorCode,
'errorMessage': action.errorMessage,
'missingDependencies': action.missingDependencies
}
actions.append(massaged_action)
# Sorting for Oozie < 4.1 backward compatibility
actions.sort(key=lambda k: k['number'], reverse=True)
return actions
def massaged_bundle_actions_for_json(bundle):
bundle_actions = bundle.get_working_actions()
actions = []
for action in bundle_actions:
massaged_action = {
'id': action.coordJobId,
'url': action.coordJobId and reverse('oozie:list_oozie_coordinator', kwargs={'job_id': action.coordJobId}) + '?bundle_job_id=%s' % bundle.id or '',
'name': action.coordJobName,
'type': action.type,
'status': action.status,
'externalId': action.coordExternalId or '-',
'frequency': action.frequency,
'timeUnit': action.timeUnit,
'nextMaterializedTime': action.nextMaterializedTime,
'concurrency': action.concurrency,
'pauseTime': action.pauseTime,
'user': action.user,
'acl': action.acl,
'timeOut': action.timeOut,
'coordJobPath': action.coordJobPath,
'executionPolicy': action.executionPolicy,
'startTime': action.startTime,
'endTime': action.endTime,
'lastAction': action.lastAction
}
actions.insert(0, massaged_action)
return actions
def format_time(st_time):
if st_time is None:
return '-'
elif type(st_time) == time.struct_time:
return time.strftime("%a, %d %b %Y %H:%M:%S", st_time)
else:
return st_time
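# Illustrative sketch (not in the original module): format_time renders
# time.struct_time values and passes anything else through unchanged; in a C
# locale, e.g.:
#   >>> import time
#   >>> format_time(time.gmtime(0))
#   'Thu, 01 Jan 1970 00:00:00'
#   >>> format_time(None)
#   '-'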
def catch_unicode_time(u_time):
if type(u_time) == time.struct_time:
return u_time
else:
return datetime.timetuple(datetime.strptime(u_time, '%a, %d %b %Y %H:%M:%S %Z'))
def massaged_oozie_jobs_for_json(oozie_jobs, user, just_sla=False):
jobs = []
for job in oozie_jobs:
    if not just_sla or (just_sla and job.has_sla and job.appName != 'pig-app-hue-script'):
last_modified_time_millis = hasattr(job, 'lastModTime') and job.lastModTime and (time.time() - time.mktime(job.lastModTime)) * 1000 or 0
duration_millis = job.endTime and job.startTime and ((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or 0
massaged_job = {
'id': job.id,
'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
'lastModTimeInMillis': last_modified_time_millis,
'lastModTimeFormatted': last_modified_time_millis and format_duration_in_millis(last_modified_time_millis) or None,
'kickoffTime': hasattr(job, 'kickoffTime') and job.kickoffTime and format_time(job.kickoffTime) or '',
'kickoffTimeInMillis': hasattr(job, 'kickoffTime') and job.kickoffTime and time.mktime(catch_unicode_time(job.kickoffTime)) or 0,
'nextMaterializedTime': hasattr(job, 'nextMaterializedTime') and job.nextMaterializedTime and format_time(job.nextMaterializedTime) or '',
'nextMaterializedTimeInMillis': hasattr(job, 'nextMaterializedTime') and job.nextMaterializedTime and time.mktime(job.nextMaterializedTime) or 0,
'timeOut': hasattr(job, 'timeOut') and job.timeOut or None,
'endTime': job.endTime and format_time(job.endTime) or None,
      'pauseTime': hasattr(job, 'pauseTime') and job.pauseTime and format_time(job.pauseTime) or None,
'concurrency': hasattr(job, 'concurrency') and job.concurrency or None,
'endTimeInMillis': job.endTime and time.mktime(job.endTime) or 0,
'status': job.status,
'isRunning': job.is_running(),
'duration': duration_millis and format_duration_in_millis(duration_millis) or None,
'durationInMillis': duration_millis,
'appName': job.appName,
'progress': job.get_progress(),
'user': job.user,
'absoluteUrl': job.get_absolute_url(),
'canEdit': has_job_edition_permission(job, user),
'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id':job.id, 'action':'kill'}),
'suspendUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id':job.id, 'action':'suspend'}),
'resumeUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id':job.id, 'action':'resume'}),
'created': hasattr(job, 'createdTime') and job.createdTime and format_time(job.createdTime) or '',
'createdInMillis': hasattr(job, 'createdTime') and job.createdTime and time.mktime(catch_unicode_time(job.createdTime)) or 0,
'startTime': hasattr(job, 'startTime') and format_time(job.startTime) or None,
'startTimeInMillis': hasattr(job, 'startTime') and job.startTime and time.mktime(job.startTime) or 0,
'run': hasattr(job, 'run') and job.run or 0,
'frequency': hasattr(job, 'frequency') and Coordinator.CRON_MAPPING.get(job.frequency, job.frequency) or None,
'timeUnit': hasattr(job, 'timeUnit') and job.timeUnit or None,
'parentUrl': hasattr(job, 'parentId') and job.parentId and get_link(job.parentId) or '',
'submittedManually': hasattr(job, 'parentId') and (job.parentId is None or 'C@' not in job.parentId)
}
jobs.append(massaged_job)
return { 'jobs': jobs }
def check_job_access_permission(request, job_id, **kwargs):
"""
Decorator ensuring that the user has access to the job submitted to Oozie.
Arg: Oozie 'workflow', 'coordinator' or 'bundle' ID.
Return: the Oozie workflow, coordinator or bundle or raise an exception
Notice: its gets an id in input and returns the full object in output (not an id).
"""
if job_id is not None:
oozie_api = get_oozie(request.user)
if job_id.endswith('W'):
get_job = oozie_api.get_job
elif job_id.endswith('C'):
get_job = oozie_api.get_coordinator
else:
get_job = oozie_api.get_bundle
try:
if job_id.endswith('C'):
oozie_job = get_job(job_id, **kwargs)
else:
oozie_job = get_job(job_id)
except RestException, ex:
msg = _("Error accessing Oozie job %s.") % (job_id,)
LOG.exception(msg)
      raise PopupException(msg, detail=ex._headers.get('oozie-error-message', ''))
if request.user.is_superuser \
or oozie_job.user == request.user.username \
or has_dashboard_jobs_access(request.user):
return oozie_job
else:
message = _("Permission denied. %(username)s does not have the permissions to access job %(id)s.") % \
{'username': request.user.username, 'id': oozie_job.id}
access_warn(request, message)
raise PopupException(message)
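# Illustrative usage (hypothetical workflow id): callers pass an Oozie job id
# and get the full job object back, or a PopupException if the job is missing
# or the user lacks access:
#   oozie_workflow = check_job_access_permission(
#       request, '0000001-140101000000000-oozie-oozi-W')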
def check_job_edition_permission(oozie_job, user):
if has_job_edition_permission(oozie_job, user):
return oozie_job
else:
message = _("Permission denied. %(username)s does not have the permissions to modify job %(id)s.") % \
{'username': user.username, 'id': oozie_job.id}
raise PopupException(message)
def has_job_edition_permission(oozie_job, user):
return user.is_superuser or oozie_job.user == user.username
def has_dashboard_jobs_access(user):
return user.is_superuser or user.has_hue_permission(action="dashboard_jobs_access", app=DJANGO_APPS[0])
| MobinRanjbar/hue | apps/oozie/src/oozie/views/dashboard.py | Python | apache-2.0 | 44,447 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-07 22:51
from __future__ import unicode_literals
import c3nav.mapdata.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Announcement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='created')),
('active_until', models.DateTimeField(null=True, verbose_name='active until')),
('active', models.BooleanField(default=True, verbose_name='active')),
('message', c3nav.mapdata.fields.I18nField(verbose_name='Message')),
],
options={
'verbose_name': 'Announcement',
'verbose_name_plural': 'Announcements',
'get_latest_by': 'created',
'default_related_name': 'announcements',
},
),
]
| c3nav/c3nav | src/c3nav/site/migrations/0001_announcement.py | Python | apache-2.0 | 1,128 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.ads.googleads.v9.resources.types import account_budget_proposal
from google.ads.googleads.v9.services.types import (
account_budget_proposal_service,
)
from .transports.base import (
AccountBudgetProposalServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import AccountBudgetProposalServiceGrpcTransport
class AccountBudgetProposalServiceClientMeta(type):
"""Metaclass for the AccountBudgetProposalService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[AccountBudgetProposalServiceTransport]]
_transport_registry["grpc"] = AccountBudgetProposalServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[AccountBudgetProposalServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class AccountBudgetProposalServiceClient(
metaclass=AccountBudgetProposalServiceClientMeta
):
"""A service for managing account-level budgets via proposals.
A proposal is a request to create a new budget or make changes
to an existing one.
Reads for account-level budgets managed by these proposals will
be supported in a future version. Until then, please use the
BudgetOrderService from the AdWords API. Learn more at
    https://developers.google.com/adwords/api/docs/guides/budget-order
Mutates:
The CREATE operation creates a new proposal.
UPDATE operations aren't supported.
The REMOVE operation cancels a pending proposal.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
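    # Illustrative sketch (not part of the generated client): the conversion
    # inserts ".mtls" after the service name for Google API hosts, e.g.:
    #   >>> AccountBudgetProposalServiceClient._get_default_mtls_endpoint(
    #   ...     "googleads.googleapis.com")
    #   'googleads.mtls.googleapis.com'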
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AccountBudgetProposalServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AccountBudgetProposalServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
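    # Illustrative construction (hypothetical key path), assuming a service
    # account JSON key exists on disk:
    #   client = AccountBudgetProposalServiceClient.from_service_account_file(
    #       "/path/to/service_account_key.json")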
@property
def transport(self) -> AccountBudgetProposalServiceTransport:
"""Return the transport used by the client instance.
Returns:
AccountBudgetProposalServiceTransport: The transport used by the client instance.
"""
return self._transport
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
@staticmethod
def account_budget_path(customer_id: str, account_budget_id: str,) -> str:
"""Return a fully-qualified account_budget string."""
return "customers/{customer_id}/accountBudgets/{account_budget_id}".format(
customer_id=customer_id, account_budget_id=account_budget_id,
)
@staticmethod
def parse_account_budget_path(path: str) -> Dict[str, str]:
"""Parse a account_budget path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/accountBudgets/(?P<account_budget_id>.+?)$",
path,
)
return m.groupdict() if m else {}
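    # Illustrative round trip (hypothetical ids) through these path helpers:
    #   >>> p = AccountBudgetProposalServiceClient.account_budget_path("123", "456")
    #   >>> p
    #   'customers/123/accountBudgets/456'
    #   >>> AccountBudgetProposalServiceClient.parse_account_budget_path(p)
    #   {'customer_id': '123', 'account_budget_id': '456'}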
@staticmethod
def account_budget_proposal_path(
customer_id: str, account_budget_proposal_id: str,
) -> str:
"""Return a fully-qualified account_budget_proposal string."""
return "customers/{customer_id}/accountBudgetProposals/{account_budget_proposal_id}".format(
customer_id=customer_id,
account_budget_proposal_id=account_budget_proposal_id,
)
@staticmethod
def parse_account_budget_proposal_path(path: str) -> Dict[str, str]:
"""Parse a account_budget_proposal path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/accountBudgetProposals/(?P<account_budget_proposal_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def billing_setup_path(customer_id: str, billing_setup_id: str,) -> str:
"""Return a fully-qualified billing_setup string."""
return "customers/{customer_id}/billingSetups/{billing_setup_id}".format(
customer_id=customer_id, billing_setup_id=billing_setup_id,
)
@staticmethod
def parse_billing_setup_path(path: str) -> Dict[str, str]:
"""Parse a billing_setup path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/billingSetups/(?P<billing_setup_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[
str, AccountBudgetProposalServiceTransport, None
] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the account budget proposal service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.AccountBudgetProposalServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
"true",
"false",
):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
)
ssl_credentials = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
import grpc # type: ignore
cert, key = client_options.client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
is_mtls = True
else:
creds = SslCredentials()
is_mtls = creds.is_mtls
ssl_credentials = creds.ssl_credentials if is_mtls else None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, AccountBudgetProposalServiceTransport):
# transport is a AccountBudgetProposalServiceTransport instance.
if credentials:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
self._transport = transport
elif isinstance(transport, str):
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials, host=self.DEFAULT_ENDPOINT
)
else:
self._transport = AccountBudgetProposalServiceGrpcTransport(
credentials=credentials,
host=api_endpoint,
ssl_channel_credentials=ssl_credentials,
client_info=client_info,
)
def get_account_budget_proposal(
self,
request: Union[
account_budget_proposal_service.GetAccountBudgetProposalRequest,
dict,
] = None,
*,
resource_name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> account_budget_proposal.AccountBudgetProposal:
r"""Returns an account-level budget proposal in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Args:
request (Union[google.ads.googleads.v9.services.types.GetAccountBudgetProposalRequest, dict]):
The request object. Request message for
[AccountBudgetProposalService.GetAccountBudgetProposal][google.ads.googleads.v9.services.AccountBudgetProposalService.GetAccountBudgetProposal].
resource_name (:class:`str`):
Required. The resource name of the
account-level budget proposal to fetch.
This corresponds to the ``resource_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v9.resources.types.AccountBudgetProposal:
An account-level budget proposal.
All fields prefixed with 'proposed' may not
necessarily be applied directly. For example,
proposed spending limits may be adjusted before their
application. This is true if the 'proposed' field has
an 'approved' counterpart, e.g. spending limits.
Please note that the proposal type (proposal_type)
changes which fields are required and which must
remain empty.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([resource_name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a account_budget_proposal_service.GetAccountBudgetProposalRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
account_budget_proposal_service.GetAccountBudgetProposalRequest,
):
request = account_budget_proposal_service.GetAccountBudgetProposalRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if resource_name is not None:
request.resource_name = resource_name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.get_account_budget_proposal
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("resource_name", request.resource_name),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
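    # Illustrative call (hypothetical customer and proposal ids), assuming
    # default credentials are available in the environment:
    #   client = AccountBudgetProposalServiceClient()
    #   name = client.account_budget_proposal_path("1234567890", "987")
    #   proposal = client.get_account_budget_proposal(resource_name=name)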
def mutate_account_budget_proposal(
self,
request: Union[
account_budget_proposal_service.MutateAccountBudgetProposalRequest,
dict,
] = None,
*,
customer_id: str = None,
operation: account_budget_proposal_service.AccountBudgetProposalOperation = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> account_budget_proposal_service.MutateAccountBudgetProposalResponse:
r"""Creates, updates, or removes account budget proposals. Operation
statuses are returned.
List of thrown errors: `AccountBudgetProposalError <>`__
`AuthenticationError <>`__ `AuthorizationError <>`__
`DatabaseError <>`__ `DateError <>`__ `FieldError <>`__
`FieldMaskError <>`__ `HeaderError <>`__ `InternalError <>`__
`MutateError <>`__ `QuotaError <>`__ `RequestError <>`__
`StringLengthError <>`__
Args:
request (Union[google.ads.googleads.v9.services.types.MutateAccountBudgetProposalRequest, dict]):
The request object. Request message for
[AccountBudgetProposalService.MutateAccountBudgetProposal][google.ads.googleads.v9.services.AccountBudgetProposalService.MutateAccountBudgetProposal].
customer_id (:class:`str`):
Required. The ID of the customer.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operation (:class:`google.ads.googleads.v9.services.types.AccountBudgetProposalOperation`):
Required. The operation to perform on
an individual account-level budget
proposal.
This corresponds to the ``operation`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v9.services.types.MutateAccountBudgetProposalResponse:
Response message for account-level
budget mutate operations.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([customer_id, operation]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a account_budget_proposal_service.MutateAccountBudgetProposalRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
account_budget_proposal_service.MutateAccountBudgetProposalRequest,
):
request = account_budget_proposal_service.MutateAccountBudgetProposalRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if operation is not None:
request.operation = operation
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.mutate_account_budget_proposal
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
__all__ = ("AccountBudgetProposalServiceClient",)
| googleads/google-ads-python | google/ads/googleads/v9/services/services/account_budget_proposal_service/client.py | Python | apache-2.0 | 26,197 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import sys
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop, shell
CACHE_SWEEP_INTERVAL = 30
VALID_HOSTNAME = re.compile(br"(?!-)[_A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
address = address.strip(b'.')
labels = address.split(b'.')
results = []
for label in labels:
l = len(label)
if l > 63:
return None
results.append(common.chr(l))
results.append(label)
results.append(b'\0')
return b''.join(results)
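# Illustrative sketch (not in the original module): build_address emits the
# RFC 1035 length-prefixed label encoding, terminated by a zero byte, e.g.:
#   >>> build_address(b'www.example.com')
#   b'\x03www\x07example\x03com\x00'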
def build_request(address, qtype):
request_id = os.urandom(2)
header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
addr = build_address(address)
qtype_qclass = struct.pack('!HH', qtype, QCLASS_IN)
return request_id + header + addr + qtype_qclass
def parse_ip(addrtype, data, length, offset):
if addrtype == QTYPE_A:
return socket.inet_ntop(socket.AF_INET, data[offset:offset + length])
elif addrtype == QTYPE_AAAA:
return socket.inet_ntop(socket.AF_INET6, data[offset:offset + length])
elif addrtype in [QTYPE_CNAME, QTYPE_NS]:
return parse_name(data, offset)[1]
else:
return data[offset:offset + length]
def parse_name(data, offset):
p = offset
labels = []
l = common.ord(data[p])
while l > 0:
if (l & (128 + 64)) == (128 + 64):
# pointer
pointer = struct.unpack('!H', data[p:p + 2])[0]
pointer &= 0x3FFF
r = parse_name(data, pointer)
labels.append(r[1])
p += 2
# pointer is the end
return p - offset, b'.'.join(labels)
else:
labels.append(data[p + 1:p + 1 + l])
p += 1 + l
l = common.ord(data[p])
return p - offset + 1, b'.'.join(labels)
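# Illustrative sketch (not part of the original module): RFC 1035 name
# compression as handled by parse_name() above. A length byte with the top
# two bits set (0xC0) starts a two-byte pointer; masking with 0x3FFF
# recovers the offset it points to.
def _compression_pointer_example():
    length_byte = 0xc0  # top two bits set marks a pointer
    assert (length_byte & (128 + 64)) == (128 + 64)
    pointer = struct.unpack('!H', b'\xc0\x0c')[0] & 0x3FFF
    assert pointer == 12  # typically the first name right after the header
    return pointer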
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
nlen, name = parse_name(data, offset)
if not question:
record_type, record_class, record_ttl, record_rdlength = struct.unpack(
'!HHiH', data[offset + nlen:offset + nlen + 10]
)
ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
return nlen + 10 + record_rdlength, \
(name, ip, record_type, record_class, record_ttl)
else:
record_type, record_class = struct.unpack(
'!HH', data[offset + nlen:offset + nlen + 4]
)
        return nlen + 4, (name, None, record_type, record_class, None)
def parse_header(data):
if len(data) >= 12:
header = struct.unpack('!HBBHHHH', data[:12])
res_id = header[0]
res_qr = header[1] & 128
res_tc = header[1] & 2
res_ra = header[2] & 128
res_rcode = header[2] & 15
# assert res_tc == 0
# assert res_rcode in [0, 3]
res_qdcount = header[3]
res_ancount = header[4]
res_nscount = header[5]
res_arcount = header[6]
return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
res_ancount, res_nscount, res_arcount)
return None
def parse_response(data):
try:
if len(data) >= 12:
header = parse_header(data)
if not header:
return None
res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
res_ancount, res_nscount, res_arcount = header
qds = []
ans = []
offset = 12
for i in range(0, res_qdcount):
l, r = parse_record(data, offset, True)
offset += l
if r:
qds.append(r)
for i in range(0, res_ancount):
l, r = parse_record(data, offset)
offset += l
if r:
ans.append(r)
for i in range(0, res_nscount):
l, r = parse_record(data, offset)
offset += l
for i in range(0, res_arcount):
l, r = parse_record(data, offset)
offset += l
response = DNSResponse()
if qds:
response.hostname = qds[0][0]
for an in qds:
response.questions.append((an[1], an[2], an[3]))
for an in ans:
response.answers.append((an[1], an[2], an[3]))
return response
except Exception as e:
shell.print_exception(e)
return None
def is_valid_hostname(hostname):
if len(hostname) > 255:
return False
    if hostname[-1:] == b'.':  # slice, not index: stays bytes on Python 3
hostname = hostname[:-1]
return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
class DNSResponse(object):
def __init__(self):
self.hostname = None
self.questions = [] # each: (addr, type, class)
self.answers = [] # each: (addr, type, class)
def __str__(self):
return '%s: %s' % (self.hostname, str(self.answers))
STATUS_FIRST = 0
STATUS_SECOND = 1
class DNSResolver(object):
def __init__(self, server_list=None, prefer_ipv6=False):
self._loop = None
self._hosts = {}
self._hostname_status = {}
self._hostname_to_cb = {}
self._cb_to_hostname = {}
self._cache = lru_cache.LRUCache(timeout=300)
self._sock = None
if server_list is None:
self._servers = None
self._parse_resolv()
else:
self._servers = server_list
if prefer_ipv6:
self._QTYPES = [QTYPE_AAAA, QTYPE_A]
else:
self._QTYPES = [QTYPE_A, QTYPE_AAAA]
self._parse_hosts()
# TODO monitor hosts change and reload hosts
# TODO parse /etc/gai.conf and follow its rules
def _parse_resolv(self):
self._servers = []
try:
with open('/etc/resolv.conf', 'rb') as f:
content = f.readlines()
for line in content:
line = line.strip()
if not (line and line.startswith(b'nameserver')):
continue
parts = line.split()
if len(parts) < 2:
continue
server = parts[1]
if common.is_ip(server) == socket.AF_INET:
if type(server) != str:
server = server.decode('utf8')
self._servers.append(server)
except IOError:
pass
if not self._servers:
self._servers = ['8.8.4.4', '8.8.8.8']
def _parse_hosts(self):
etc_path = '/etc/hosts'
if 'WINDIR' in os.environ:
etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
try:
with open(etc_path, 'rb') as f:
for line in f.readlines():
line = line.strip()
parts = line.split()
if len(parts) < 2:
continue
ip = parts[0]
if not common.is_ip(ip):
continue
for i in range(1, len(parts)):
hostname = parts[i]
if hostname:
self._hosts[hostname] = ip
except IOError:
self._hosts['localhost'] = '127.0.0.1'
def add_to_loop(self, loop):
if self._loop:
            raise Exception('already added to loop')
self._loop = loop
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
loop.add(self._sock, eventloop.POLL_IN, self)
loop.add_periodic(self.handle_periodic)
def _call_callback(self, hostname, ip, error=None):
callbacks = self._hostname_to_cb.get(hostname, [])
for callback in callbacks:
if callback in self._cb_to_hostname:
del self._cb_to_hostname[callback]
if ip or error:
callback((hostname, ip), error)
else:
callback((hostname, None),
Exception('unknown hostname %s' % hostname))
if hostname in self._hostname_to_cb:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _handle_data(self, data):
response = parse_response(data)
if response and response.hostname:
hostname = response.hostname
ip = None
for answer in response.answers:
if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
answer[2] == QCLASS_IN:
ip = answer[0]
break
if not ip and self._hostname_status.get(hostname, STATUS_SECOND) \
== STATUS_FIRST:
self._hostname_status[hostname] = STATUS_SECOND
self._send_req(hostname, self._QTYPES[1])
else:
if ip:
self._cache[hostname] = ip
self._call_callback(hostname, ip)
elif self._hostname_status.get(hostname, None) \
== STATUS_SECOND:
for question in response.questions:
if question[1] == self._QTYPES[1]:
self._call_callback(hostname, None)
break
def handle_event(self, sock, fd, event):
if sock != self._sock:
return
if event & eventloop.POLL_ERR:
logging.error('dns socket err')
self._loop.remove(self._sock)
self._sock.close()
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
self._loop.add(self._sock, eventloop.POLL_IN, self)
else:
data, addr = sock.recvfrom(1024)
if addr[0] not in self._servers:
                logging.warning('received a packet from an unexpected dns server')
return
self._handle_data(data)
def handle_periodic(self):
self._cache.sweep()
def remove_callback(self, callback):
hostname = self._cb_to_hostname.get(callback)
if hostname:
del self._cb_to_hostname[callback]
arr = self._hostname_to_cb.get(hostname, None)
if arr:
arr.remove(callback)
if not arr:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _send_req(self, hostname, qtype):
req = build_request(hostname, qtype)
for server in self._servers:
logging.debug('resolving %s with type %d using server %s',
hostname, qtype, server)
self._sock.sendto(req, (server, 53))
def resolve(self, hostname, callback):
if type(hostname) != bytes:
hostname = hostname.encode('utf8')
if not hostname:
callback(None, Exception('empty hostname'))
elif common.is_ip(hostname):
callback((hostname, hostname), None)
elif hostname in self._hosts:
logging.debug('hit hosts: %s', hostname)
ip = self._hosts[hostname]
callback((hostname, ip), None)
elif hostname in self._cache:
logging.debug('hit cache: %s', hostname)
ip = self._cache[hostname]
callback((hostname, ip), None)
else:
if not is_valid_hostname(hostname):
callback(None, Exception('invalid hostname: %s' % hostname))
return
arr = self._hostname_to_cb.get(hostname, None)
if not arr:
self._hostname_status[hostname] = STATUS_FIRST
self._send_req(hostname, self._QTYPES[0])
self._hostname_to_cb[hostname] = [callback]
self._cb_to_hostname[callback] = hostname
else:
arr.append(callback)
# TODO send again only if waited too long
self._send_req(hostname, self._QTYPES[0])
def close(self):
if self._sock:
if self._loop:
self._loop.remove_periodic(self.handle_periodic)
self._loop.remove(self._sock)
self._sock.close()
self._sock = None
def test():
dns_resolver = DNSResolver()
loop = eventloop.EventLoop()
dns_resolver.add_to_loop(loop)
global counter
counter = 0
def make_callback():
global counter
def callback(result, error):
global counter
# TODO: what can we assert?
print(result, error)
counter += 1
if counter == 9:
dns_resolver.close()
loop.stop()
a_callback = callback
return a_callback
assert(make_callback() != make_callback())
dns_resolver.resolve(b'google.com', make_callback())
dns_resolver.resolve('google.com', make_callback())
dns_resolver.resolve('example.com', make_callback())
dns_resolver.resolve('ipv6.google.com', make_callback())
dns_resolver.resolve('www.facebook.com', make_callback())
dns_resolver.resolve('ns2.google.com', make_callback())
dns_resolver.resolve('invalid.@!#$%^&$@.hostname', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
loop.run()
if __name__ == '__main__':
test()
|
plus1s/shadowsocks-py-mu
|
shadowsocks/asyncdns.py
|
Python
|
apache-2.0
| 17,651
|
if __name__ == '__main__':
x = int(input())
y = int(input())
z = int(input())
n = int(input())
    L = [[a, b, c] for a in range(x + 1) for b in range(y + 1) for c in range(z + 1)]
    L = [coord for coord in L if sum(coord) != n]  # avoid shadowing x in a lambda
print(L)
|
kakaba2009/MachineLearning
|
python/src/algorithm/coding/basic/comprehension.py
|
Python
|
apache-2.0
| 240
|
from django import forms
from django.shortcuts import render_to_response
from captcha.fields import CaptchaField
class CaptchaTestForm(forms.Form):
    myfield = forms.CharField()  # stand-in for any other form field
    captcha = CaptchaField()
def some_view(request):
if request.POST:
form = CaptchaTestForm(request.POST)
# Validate the form: the captcha field will automatically
# check the input
if form.is_valid():
human = True
else:
form = CaptchaTestForm()
return render_to_response('template.html',locals())
|
Andrew0701/coursework-web
|
progress/home/captcha.py
|
Python
|
apache-2.0
| 497
|
# Load pickled data
import pickle
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import ImageGrid
import numpy as np
import tensorflow as tf
from tensorflow.contrib.layers import flatten
from sklearn.utils import shuffle
class Data:
def __init__(self):
training_file = 'data/train.p'
validation_file= 'data/valid.p'
testing_file = 'data/test.p'
with open(training_file, mode='rb') as f:
train = pickle.load(f)
with open(validation_file, mode='rb') as f:
valid = pickle.load(f)
with open(testing_file, mode='rb') as f:
test = pickle.load(f)
self.X_train, self.y_train = train['features'], train['labels']
self.X_valid, self.y_valid = valid['features'], valid['labels']
self.X_test, self.y_test = test['features'], test['labels']
def render_data(self):
image_with_label = zip(self.X_train, self.y_train)
seen_labels = set()
fig = plt.figure(figsize=(200, 200))
total_unique_labels = len(set(self.y_train))
unique_rows = total_unique_labels // 5 + 1
grid = ImageGrid(fig, 151, # similar to subplot(141)
nrows_ncols=(unique_rows, 5),
axes_pad=0.05,
label_mode="1",
)
i = 0
for i_l in image_with_label:
img, label = i_l
if label not in seen_labels:
im = grid[i].imshow(img)
seen_labels.add(label)
i += 1
plt.show()
def LeNet(x, max_labels):
# Hyper parameters
mu = 0
sigma = 0.1
# Convolutional Layer 1: Input = 32x32x3 Output = 28x28x6
conv1_W = tf.Variable(tf.truncated_normal(shape=(5, 5, 3, 6), mean=mu, stddev=sigma), name="v1")
conv1_b = tf.Variable(tf.zeros(6), name="v2")
conv1 = tf.nn.conv2d(x, conv1_W, strides=[1, 1, 1, 1], padding='VALID') + conv1_b
# Activation Layer
conv1 = tf.nn.relu(conv1)
# Max Pooling : Input = 28x28x6 Output = 14x14x6
conv1 = tf.nn.max_pool(conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='VALID')
# Convolutional Layer 2: Input = 14x14x6 Output: 10x10x16
conv2_W = tf.Variable(tf.truncated_normal(shape=(5, 5, 6, 16), mean=mu, stddev=sigma), name="v3")
conv2_b = tf.Variable(tf.zeros(16), name="v4")
conv2 = tf.nn.conv2d(conv1, conv2_W, strides=[1, 1, 1, 1], padding='VALID') + conv2_b
# Activation Layer
conv2 = tf.nn.relu(conv2)
# Max Pooling : Input = 10x10x16 Output = 5x5x16
conv2 = tf.nn.max_pool(conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='VALID')
# Fully Connected Layer
fc0 = flatten(conv2)
# Layer 3 - Fully Connected: Input = 400 Output = 120
fc1_W = tf.Variable(tf.truncated_normal(shape=(400, 120), mean=mu, stddev=sigma), name="v5")
fc1_b = tf.Variable(tf.zeros(120), name="v6")
fc1 = tf.matmul(fc0, fc1_W) + fc1_b
# Activation
fc1 = tf.nn.relu(fc1)
# Layer 4 : Fully Connected: Input = 120 Output = 84
fc2_W = tf.Variable(tf.truncated_normal(shape=(120, 84), mean=mu, stddev=sigma), name="v7")
fc2_b = tf.Variable(tf.zeros(84), name="v8")
fc2 = tf.matmul(fc1, fc2_W) + fc2_b
# Activation
fc2 = tf.nn.relu(fc2)
# Layer 5 - Fully Connected Input = 84 Output = 10
fc3_W = tf.Variable(tf.truncated_normal(shape=(84, max_labels), mean=mu, stddev=sigma), name="v9")
fc3_b = tf.Variable(tf.zeros(max_labels), name="v10")
logits = tf.matmul(fc2, fc3_W) + fc3_b
return logits
def train(max_classified_id):
x = tf.placeholder(tf.float32, (None, 32, 32, 3), name="X")
y = tf.placeholder(tf.int32, (None), name="Y")
one_hot_y = tf.one_hot(y, max_classified_id)
rate = 0.001
logits = LeNet(x, max_classified_id)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=one_hot_y)
loss_operation = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate=rate)
training_operation = optimizer.minimize(loss_operation)
correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(one_hot_y, 1))
accuracy_operation = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    saver = tf.train.Saver()
return saver, training_operation, accuracy_operation, x, y
def evaluate(x, y, X_data, y_data, accuracy_operation, BATCH_SIZE):
num_examples = len(X_data)
total_accuracy = 0
sess = tf.get_default_session()
for offset in range(0, num_examples, BATCH_SIZE):
batch_x, batch_y = X_data[offset:offset+BATCH_SIZE], y_data[offset:offset+BATCH_SIZE]
accuracy = sess.run(accuracy_operation, feed_dict={x: batch_x, y: batch_y})
total_accuracy += (accuracy * len(batch_x))
return total_accuracy / num_examples
def main():
data = Data()
EPOCHS = 10
BATCH_SIZE = 128
    max_classified_id = np.max(data.y_train) + 1  # class count, since labels start at 0
saver, training_operation, accuracy_operation, x, y = train(max_classified_id)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
num_examples = len(data.X_train)
print("Training...")
print()
for i in range(EPOCHS):
X_train, y_train = shuffle(data.X_train, data.y_train)
for offset in range(0, num_examples, BATCH_SIZE):
end = offset + BATCH_SIZE
batch_x, batch_y = X_train[offset:end], y_train[offset:end]
sess.run(training_operation, feed_dict={x: batch_x, y: batch_y})
validation_accuracy = evaluate(x, y, data.X_valid, data.y_valid, accuracy_operation, BATCH_SIZE)
print("EPOCH {} ...".format(i+1))
print("Validation Accuracy = {:.3f}".format(validation_accuracy))
print()
saver.save(sess, './lenet')
print("Model saved")
if __name__ == "__main__":
main()
|
ssarangi/self_driving_cars
|
traffic_sign_classifier/classify.py
|
Python
|
apache-2.0
| 6,030
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
DOCUMENTATION = '''
---
module: softlayer_vs_ip
short_description: Retrieves instance ip addresses from Softlayer
description:
- Retrieves instance ip addresses of all adapters from Softlayer
- the result is stored in the "result" dict entry of he registered variable
requirements:
- Requires SoftLayer python client
- Requires Ansible
options:
api_key:
description:
- SoftLayer API Key
default: null
sl_username:
description:
- SoftLayer username
default: null
fqdn:
description:
- The fully qualified domain name of the instance.
type: string
required: true
author: scoss
notes:
- Instead of supplying api_key and username, .softlayer or env variables
'''
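# Illustrative EXAMPLES block (not in the original module); the task below is
# hypothetical and only shows how the documented options fit together.
EXAMPLES = '''
- name: read the ip addresses of an instance
  softlayer_vs_ip:
    sl_username: my_sl_user
    api_key: my_api_key
    fqdn: host01.example.com
  register: vs_ip
- debug: var=vs_ip.result
'''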
from ansible.module_utils.basic import *
import SoftLayer
import sys
import logging
import time
from softlayer_vs_basic import *
class IpAddressReader(SoftlayerVirtualServerBasic):
def __init__(self, sl_client, instance_config):
SoftlayerVirtualServerBasic.__init__(self, sl_client, instance_config)
def read_ip_address(self):
sl_instance = self.sl_virtual_guest.getObject(id=self.get_vs_id(True), mask='primaryBackendIpAddress, primaryIpAddress')
return sl_instance
def main():
module_helper = AnsibleModule(
argument_spec = dict(
SLClientConfig.arg_spec().items() + VSInstanceConfigBasic.arg_spec().items()
)
)
sl_client_config = SLClientConfig(module_helper.params)
sl_client = SoftLayer.Client(username=sl_client_config.sl_username, api_key=sl_client_config.api_key)
vs = IpAddressReader(sl_client,
VSInstanceConfigBasic(ansible_config=module_helper.params))
try:
module_helper.exit_json(changed=False, result=vs.read_ip_address())
except Exception as se:
module_helper.fail_json(changed=False, msg=str(se))
main()
|
hubward/ansible-softlayer
|
softlayer_vs_ip.py
|
Python
|
apache-2.0
| 1,999
|
"""Bitcoin information service that uses blockchain.info."""
from datetime import timedelta
import logging
from blockchain import exchangerates, statistics
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_CURRENCY, CONF_DISPLAY_OPTIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by blockchain.info"
DEFAULT_CURRENCY = "USD"
ICON = "mdi:currency-btc"
SCAN_INTERVAL = timedelta(minutes=5)
OPTION_TYPES = {
"exchangerate": ["Exchange rate (1 BTC)", None],
"trade_volume_btc": ["Trade volume", "BTC"],
"miners_revenue_usd": ["Miners revenue", "USD"],
"btc_mined": ["Mined", "BTC"],
"trade_volume_usd": ["Trade volume", "USD"],
"difficulty": ["Difficulty", None],
"minutes_between_blocks": ["Time between Blocks", "min"],
"number_of_transactions": ["No. of Transactions", None],
"hash_rate": ["Hash rate", "PH/s"],
"timestamp": ["Timestamp", None],
"mined_blocks": ["Mined Blocks", None],
"blocks_size": ["Block size", None],
"total_fees_btc": ["Total fees", "BTC"],
"total_btc_sent": ["Total sent", "BTC"],
"estimated_btc_sent": ["Estimated sent", "BTC"],
"total_btc": ["Total", "BTC"],
"total_blocks": ["Total Blocks", None],
"next_retarget": ["Next retarget", None],
"estimated_transaction_volume_usd": ["Est. Transaction volume", "USD"],
"miners_revenue_btc": ["Miners revenue", "BTC"],
"market_price_usd": ["Market price", "USD"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DISPLAY_OPTIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(OPTION_TYPES)]
),
vol.Optional(CONF_CURRENCY, default=DEFAULT_CURRENCY): cv.string,
}
)
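# Illustrative configuration sketch (not part of the original file), assuming
# the schema above; a hypothetical configuration.yaml entry:
#
#   sensor:
#     - platform: bitcoin
#       currency: USD
#       display_options:
#         - exchangerate
#         - trade_volume_btc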
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Bitcoin sensors."""
currency = config.get(CONF_CURRENCY)
if currency not in exchangerates.get_ticker():
_LOGGER.warning("Currency %s is not available. Using USD", currency)
currency = DEFAULT_CURRENCY
data = BitcoinData()
dev = []
for variable in config[CONF_DISPLAY_OPTIONS]:
dev.append(BitcoinSensor(data, variable, currency))
add_entities(dev, True)
class BitcoinSensor(Entity):
"""Representation of a Bitcoin sensor."""
def __init__(self, data, option_type, currency):
"""Initialize the sensor."""
self.data = data
self._name = OPTION_TYPES[option_type][0]
self._unit_of_measurement = OPTION_TYPES[option_type][1]
self._currency = currency
self.type = option_type
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
stats = self.data.stats
ticker = self.data.ticker
if self.type == "exchangerate":
self._state = ticker[self._currency].p15min
self._unit_of_measurement = self._currency
elif self.type == "trade_volume_btc":
self._state = "{0:.1f}".format(stats.trade_volume_btc)
elif self.type == "miners_revenue_usd":
self._state = "{0:.0f}".format(stats.miners_revenue_usd)
elif self.type == "btc_mined":
self._state = "{}".format(stats.btc_mined * 0.00000001)
elif self.type == "trade_volume_usd":
self._state = "{0:.1f}".format(stats.trade_volume_usd)
elif self.type == "difficulty":
self._state = "{0:.0f}".format(stats.difficulty)
elif self.type == "minutes_between_blocks":
self._state = "{0:.2f}".format(stats.minutes_between_blocks)
elif self.type == "number_of_transactions":
self._state = "{}".format(stats.number_of_transactions)
elif self.type == "hash_rate":
self._state = "{0:.1f}".format(stats.hash_rate * 0.000001)
elif self.type == "timestamp":
self._state = stats.timestamp
elif self.type == "mined_blocks":
self._state = "{}".format(stats.mined_blocks)
elif self.type == "blocks_size":
self._state = "{0:.1f}".format(stats.blocks_size)
elif self.type == "total_fees_btc":
self._state = "{0:.2f}".format(stats.total_fees_btc * 0.00000001)
elif self.type == "total_btc_sent":
self._state = "{0:.2f}".format(stats.total_btc_sent * 0.00000001)
elif self.type == "estimated_btc_sent":
self._state = "{0:.2f}".format(stats.estimated_btc_sent * 0.00000001)
elif self.type == "total_btc":
self._state = "{0:.2f}".format(stats.total_btc * 0.00000001)
elif self.type == "total_blocks":
self._state = "{0:.0f}".format(stats.total_blocks)
elif self.type == "next_retarget":
self._state = "{0:.2f}".format(stats.next_retarget)
elif self.type == "estimated_transaction_volume_usd":
self._state = "{0:.2f}".format(stats.estimated_transaction_volume_usd)
elif self.type == "miners_revenue_btc":
self._state = "{0:.1f}".format(stats.miners_revenue_btc * 0.00000001)
elif self.type == "market_price_usd":
self._state = "{0:.2f}".format(stats.market_price_usd)
class BitcoinData:
"""Get the latest data and update the states."""
def __init__(self):
"""Initialize the data object."""
self.stats = None
self.ticker = None
def update(self):
"""Get the latest data from blockchain.info."""
self.stats = statistics.get()
self.ticker = exchangerates.get_ticker()
|
Teagan42/home-assistant
|
homeassistant/components/bitcoin/sensor.py
|
Python
|
apache-2.0
| 6,395
|
#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six import iteritems, iterkeys
import pandas as pd
from . utils.protocol_utils import Enum
from zipline.finance.trading import with_environment
from zipline.utils.algo_instance import get_algo_instance
# Datasource type should completely determine the other fields of a
# message with its type.
DATASOURCE_TYPE = Enum(
'AS_TRADED_EQUITY',
'MERGER',
'SPLIT',
'DIVIDEND',
'TRADE',
'TRANSACTION',
'ORDER',
'EMPTY',
'DONE',
'CUSTOM',
'BENCHMARK',
'COMMISSION'
)
# Expected fields/index values for a dividend Series.
DIVIDEND_FIELDS = [
'declared_date',
'ex_date',
'gross_amount',
'net_amount',
'pay_date',
'payment_sid',
'ratio',
'sid',
]
# Expected fields/index values for a dividend payment Series.
DIVIDEND_PAYMENT_FIELDS = ['id', 'payment_sid', 'cash_amount', 'share_count']
def dividend_payment(data=None):
"""
Take a dictionary whose values are in DIVIDEND_PAYMENT_FIELDS and return a
series representing the payment of a dividend.
Ids are assigned to each historical dividend in
PerformanceTracker.update_dividends. They are guaranteed to be unique
    integers within the context of a single simulation. If @data is non-empty,
    an id is required to identify the historical dividend associated with this
payment.
Additionally, if @data is non-empty, either data['cash_amount'] should be
nonzero or data['payment_sid'] should be a security identifier and
data['share_count'] should be nonzero.
The returned Series is given its id value as a name so that concatenating
payments results in a DataFrame indexed by id. (Note, however, that the
name value is not used to construct an index when this series is returned
by function passed to `DataFrame.apply`. In such a case, pandas preserves
the index of the DataFrame on which `apply` is being called.)
"""
return pd.Series(
data=data,
name=data['id'] if data is not None else None,
index=DIVIDEND_PAYMENT_FIELDS,
dtype=object,
)
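# Illustrative sketch (not part of the original module): building a single
# payment Series with hypothetical values, per the docstring above.
def _dividend_payment_example():
    payment = dividend_payment({
        'id': 1,             # unique within a single simulation
        'payment_sid': 24,   # hypothetical security identifier
        'cash_amount': 0.0,  # zero here, so share_count must be nonzero
        'share_count': 10,
    })
    # The Series is named by its id, so concatenated payments index by id.
    assert payment.name == 1
    return payment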
class Event(object):
def __init__(self, initial_values=None):
if initial_values:
self.__dict__ = initial_values
def __getitem__(self, name):
return getattr(self, name)
def __setitem__(self, name, value):
setattr(self, name, value)
def __delitem__(self, name):
delattr(self, name)
def keys(self):
return self.__dict__.keys()
def __eq__(self, other):
return hasattr(other, '__dict__') and self.__dict__ == other.__dict__
def __contains__(self, name):
return name in self.__dict__
def __repr__(self):
return "Event({0})".format(self.__dict__)
def to_series(self, index=None):
return pd.Series(self.__dict__, index=index)
class Order(Event):
pass
class Portfolio(object):
def __init__(self):
self.capital_used = 0.0
self.starting_cash = 0.0
self.portfolio_value = 0.0
self.pnl = 0.0
self.returns = 0.0
self.cash = 0.0
self.positions = Positions()
self.start_date = None
self.positions_value = 0.0
def __getitem__(self, key):
return self.__dict__[key]
def __repr__(self):
return "Portfolio({0})".format(self.__dict__)
class Account(object):
'''
The account object tracks information about the trading account. The
values are updated as the algorithm runs and its keys remain unchanged.
If connected to a broker, one can update these values with the trading
account values as reported by the broker.
'''
def __init__(self):
self.settled_cash = 0.0
self.accrued_interest = 0.0
self.buying_power = float('inf')
self.equity_with_loan = 0.0
self.total_positions_value = 0.0
self.regt_equity = 0.0
self.regt_margin = float('inf')
self.initial_margin_requirement = 0.0
self.maintenance_margin_requirement = 0.0
self.available_funds = 0.0
self.excess_liquidity = 0.0
self.cushion = 0.0
self.day_trades_remaining = float('inf')
self.leverage = 0.0
self.net_liquidation = 0.0
def __getitem__(self, key):
return self.__dict__[key]
def __repr__(self):
return "Account({0})".format(self.__dict__)
def _get_state(self):
return 'Account', self.__dict__
def _set_state(self, saved_state):
self.__dict__.update(saved_state)
class Position(object):
def __init__(self, sid):
self.sid = sid
self.amount = 0
self.cost_basis = 0.0 # per share
self.last_sale_price = 0.0
def __getitem__(self, key):
return self.__dict__[key]
def __repr__(self):
return "Position({0})".format(self.__dict__)
class Positions(dict):
def __missing__(self, key):
pos = Position(key)
self[key] = pos
return pos
class SIDData(object):
    # Cache some data on the class so that this is shared for all instances
    # of SIDData.
    # The dt where we cached the history.
    _history_cache_dt = None
    # _history_cache is a dict mapping fields to pd.DataFrames. This is the
    # most data we have for a given field for the _history_cache_dt.
    _history_cache = {}
# This is the cache that is used for returns. This will have a different
# structure than the other history cache as this is always daily.
_returns_cache_dt = None
_returns_cache = None
# The last dt that we needed to cache the number of minutes.
_minute_bar_cache_dt = None
# If we are in minute mode, there is some cost associated with computing
# the number of minutes that we need to pass to the bar count of history.
# This will remain constant for a given bar and day count.
# This maps days to number of minutes.
_minute_bar_cache = {}
def __init__(self, sid, initial_values=None):
self._sid = sid
self._freqstr = None
# To check if we have data, we use the __len__ which depends on the
        # __dict__. Because we are forward-defining the attributes needed, we
        # need to account for their entries in the __dict__.
# We will add 1 because we need to account for the _initial_len entry
# itself.
self._initial_len = len(self.__dict__) + 1
if initial_values:
self.__dict__.update(initial_values)
@property
def datetime(self):
"""
Provides an alias from data['foo'].datetime -> data['foo'].dt
        `datetime` was previously provided by adding a separate `datetime`
member of the SIDData object via a generator that wrapped the incoming
data feed and added the field to each equity event.
This alias is intended to be temporary, to provide backwards
compatibility with existing algorithms, but should be considered
deprecated, and may be removed in the future.
"""
return self.dt
def get(self, name, default=None):
return self.__dict__.get(name, default)
def __getitem__(self, name):
return self.__dict__[name]
def __setitem__(self, name, value):
self.__dict__[name] = value
def __len__(self):
return len(self.__dict__) - self._initial_len
def __contains__(self, name):
return name in self.__dict__
def __repr__(self):
return "SIDData({0})".format(self.__dict__)
def _get_buffer(self, bars, field='price'):
"""
Gets the result of history for the given number of bars and field.
This will cache the results internally.
"""
cls = self.__class__
algo = get_algo_instance()
now = algo.datetime
if now != cls._history_cache_dt:
# For a given dt, the history call for this field will not change.
# We have a new dt, so we should reset the cache.
cls._history_cache_dt = now
cls._history_cache = {}
if field not in self._history_cache \
or bars > len(cls._history_cache[field].index):
# If we have never cached this field OR the amount of bars that we
# need for this field is greater than the amount we have cached,
# then we need to get more history.
hst = algo.history(
bars, self._freqstr, field, ffill=True,
)
# Assert that the column holds ints, not security objects.
if not isinstance(self._sid, str):
hst.columns = hst.columns.astype(int)
self._history_cache[field] = hst
        # Slice off only the bars needed. This is because we store the LARGEST
# amount of history for the field, and we might request less than the
# largest from the cache.
return cls._history_cache[field][self._sid][-bars:]
def _get_bars(self, days):
"""
Gets the number of bars needed for the current number of days.
Figures this out based on the algo datafrequency and caches the result.
This caches the result by replacing this function on the object.
This means that after the first call to _get_bars, this method will
point to a new function object.
"""
def daily_get_bars(days):
return days
@with_environment()
def minute_get_bars(days, env=None):
cls = self.__class__
now = get_algo_instance().datetime
if now != cls._minute_bar_cache_dt:
cls._minute_bar_cache_dt = now
cls._minute_bar_cache = {}
if days not in cls._minute_bar_cache:
# Cache this calculation to happen once per bar, even if we
# use another transform with the same number of days.
prev = env.previous_trading_day(now)
ds = env.days_in_range(
env.add_trading_days(-days + 2, prev),
prev,
)
# compute the number of minutes in the (days - 1) days before
# today.
                # 210 minutes in an early close and 390 in a full day.
ms = sum(210 if d in env.early_closes else 390 for d in ds)
# Add the number of minutes for today.
ms += int(
(now - env.get_open_and_close(now)[0]).total_seconds() / 60
)
cls._minute_bar_cache[days] = ms + 1 # Account for this minute
return cls._minute_bar_cache[days]
if get_algo_instance().sim_params.data_frequency == 'daily':
self._freqstr = '1d'
# update this method to point to the daily variant.
self._get_bars = daily_get_bars
else:
self._freqstr = '1m'
# update this method to point to the minute variant.
self._get_bars = minute_get_bars
# Not actually recursive because we have already cached the new method.
return self._get_bars(days)
def mavg(self, days):
return self._get_buffer(self._get_bars(days)).mean()
def stddev(self, days):
return self._get_buffer(self._get_bars(days)).std(ddof=1)
def vwap(self, days):
bars = self._get_bars(days)
prices = self._get_buffer(bars)
vols = self._get_buffer(bars, field='volume')
return (prices * vols).sum() / vols.sum()
def returns(self):
algo = get_algo_instance()
now = algo.datetime
if now != self._returns_cache_dt:
self._returns_cache_dt = now
self._returns_cache = algo.history(2, '1d', 'price', ffill=True)
hst = self._returns_cache[self._sid]
return (hst.iloc[-1] - hst.iloc[0]) / hst.iloc[0]
class BarData(object):
"""
Holds the event data for all sids for a given dt.
This is what is passed as `data` to the `handle_data` function.
Note: Many methods are analogues of dictionary because of historical
usage of what this replaced as a dictionary subclass.
"""
def __init__(self, data=None):
self._data = data or {}
self._contains_override = None
def __contains__(self, name):
if self._contains_override:
if self._contains_override(name):
return name in self._data
else:
return False
else:
return name in self._data
def has_key(self, name):
"""
DEPRECATED: __contains__ is preferred, but this method is for
compatibility with existing algorithms.
"""
return name in self
def __setitem__(self, name, value):
self._data[name] = value
def __getitem__(self, name):
return self._data[name]
def __delitem__(self, name):
del self._data[name]
def __iter__(self):
for sid, data in iteritems(self._data):
# Allow contains override to filter out sids.
if sid in self:
if len(data):
yield sid
def iterkeys(self):
# Allow contains override to filter out sids.
return (sid for sid in iterkeys(self._data) if sid in self)
def keys(self):
# Allow contains override to filter out sids.
return list(self.iterkeys())
def itervalues(self):
return (value for _sid, value in self.iteritems())
def values(self):
return list(self.itervalues())
def iteritems(self):
return ((sid, value) for sid, value
in iteritems(self._data)
if sid in self)
def items(self):
return list(self.iteritems())
def __len__(self):
return len(self.keys())
def __repr__(self):
return '{0}({1})'.format(self.__class__.__name__, self._data)
|
mattcaldwell/zipline
|
zipline/protocol.py
|
Python
|
apache-2.0
| 14,487
|
#!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
COHORTE configuration file parser: converts a parsed configuration file to
beans
:author: Thomas Calmant
:license: Apache Software License 2.0
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Python standard library
import collections
import logging
import uuid
# iPOPO Decorators
from pelix.ipopo.decorators import ComponentFactory, Provides, Instantiate, \
Requires
# COHORTE constants
import cohorte
# ------------------------------------------------------------------------------
# Documentation strings format
__docformat__ = "restructuredtext en"
# Version
__version_info__ = (1, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
# Component to be instantiated
Component = collections.namedtuple(
'Component', ('factory', 'name', 'properties'))
# Bundle to be installed
Bundle = collections.namedtuple(
'Bundle', ('name', 'filename', 'properties', 'version', 'optional'))
# Simplest configuration possible
BootConfiguration = collections.namedtuple(
'BootConfiguration', ('bundles', 'composition', 'properties',
'environment', 'boot_args'))
# Boot configuration + Isolate basic description
Isolate = collections.namedtuple(
'Isolate', BootConfiguration._fields + ('name', 'kind', 'node',
'level', 'sublevel'))
def _recursive_namedtuple_convert(data):
"""
Recursively converts the named tuples in the given object to dictionaries
:param data: An object in a named tuple or its children
:return: The converted object
"""
if isinstance(data, list):
# List
return [_recursive_namedtuple_convert(item) for item in data]
elif hasattr(data, '_asdict'):
# Named tuple
dict_value = dict(data._asdict())
for key, value in dict_value.items():
dict_value[key] = _recursive_namedtuple_convert(value)
return dict_value
else:
# Standard object
return data
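# Illustrative sketch (not part of the original module): a Bundle nested in a
# list is converted to a plain dictionary, recursively.
def _convert_example():
    bundle = Bundle(name='demo', filename=None, properties={},
                    version=None, optional=False)
    converted = _recursive_namedtuple_convert([bundle])
    assert converted == [{'name': 'demo', 'filename': None, 'properties': {},
                          'version': None, 'optional': False}]
    return converted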
# ------------------------------------------------------------------------------
@ComponentFactory('cohorte-config-parser-factory')
@Provides(cohorte.SERVICE_CONFIGURATION_READER)
@Requires('_reader', cohorte.SERVICE_FILE_READER)
@Instantiate('cohorte-config-parser')
class BootConfigParser(object):
"""
Boot configuration parser
"""
def __init__(self):
"""
Sets up the members
"""
# File reader
self._reader = None
# Loaded isolates configurations
self._isolates = None
@staticmethod
def _parse_bundle(json_object):
"""
Reads the given JSON object and returns its Bundle representation
:param json_object: A parsed JSON object
:return: A Bundle object
:raise KeyError: A mandatory parameter is missing
"""
# Use a copy of the properties
properties = {}
json_properties = json_object.get('properties')
if json_properties:
properties.update(json_properties)
return Bundle(name=json_object['name'],
filename=json_object.get('file'),
properties=properties,
version=json_object.get('version'),
optional=json_object.get('optional', False))
def _parse_bundles(self, bundles):
"""
Parses the bundles in the given list. Returns an empty list if the
given one is None or empty.
:param bundles: A list of bundles representations
:return: A list of Bundle objects
:raise KeyError: A mandatory parameter is missing
"""
if not bundles:
return []
return [self._parse_bundle(bundle) for bundle in bundles]
@staticmethod
def _parse_component(json_object):
"""
Reads the given JSON object and returns its Component representation
:param json_object: A parsed JSON object
:return: A Component object
:raise KeyError: A mandatory parameter is missing
"""
# Mandatory values
factory = json_object['factory']
# Computed name (if needed)
name = json_object.get('name', factory + '-instance')
# Use a copy of the properties
properties = {}
json_properties = json_object.get('properties')
if json_properties:
properties.update(json_properties)
return Component(factory=factory, name=name, properties=properties)
def _parse_components(self, components):
"""
Parses the components in the given list. Returns an empty list if the
given one is None or empty.
:param components: A list of components representations
:return: A list of Component objects
:raise KeyError: A mandatory parameter is missing
"""
if not components:
return []
return [self._parse_component(component) for component in components]
def _parse_isolate(self, json_object):
"""
Reads the given JSON object and returns its Isolate representation
:param json_object: A parsed JSON object
:return: An Isolate object
:raise KeyError: A mandatory parameter is missing
"""
# Reuse the boot parser
boot_config = self.load_boot_dict(json_object)
return Isolate(name=json_object['name'],
kind=json_object['kind'],
level=json_object['level'],
sublevel=json_object['sublevel'],
# Reuse boot configuration values
**boot_config._asdict())
def _prepare_configuration(self, uid, name, kind,
bundles=None, composition=None,
base_configuration=None):
"""
Prepares and returns a configuration dictionary to be stored in the
configuration broker, to start an isolate of the given kind.
:param uid: The isolate UID
:param name: The isolate name
:param kind: The kind of isolate to boot
:param bundles: Extra bundles to install
:param composition: Extra components to instantiate
:param base_configuration: Base configuration (to override)
:return: A configuration dictionary
(updated base_configuration if given)
        :raise IOError: Unknown/inaccessible kind of isolate
:raise KeyError: A parameter is missing in the configuration files
:raise ValueError: Error reading the configuration
"""
if isinstance(base_configuration, dict):
configuration = base_configuration
else:
configuration = {}
# Set up isolate properties
configuration['uid'] = uid \
or configuration.get('custom_uid') or str(uuid.uuid4())
configuration['name'] = name
configuration['kind'] = kind
# Boot configuration for this kind
new_boot = configuration.setdefault('boot', {})
new_boot.update(_recursive_namedtuple_convert(self.load_boot(kind)))
# Add bundles (or an empty list)
if bundles:
new_bundles = configuration.setdefault('bundles', [])
new_bundles.extend(_recursive_namedtuple_convert(
[self.normalize_bundle(bundle) for bundle in bundles]))
# Add components (or an empty list)
if composition:
new_compo = configuration.setdefault('composition', [])
new_compo.extend(_recursive_namedtuple_convert(composition))
# Return the configuration dictionary
return configuration
@staticmethod
def normalize_bundle(bundle):
"""
Make a Bundle object from the given Bundle-like object attributes,
using default values when necessary.
:param bundle: A Bundle-like object
:return: A Bundle object
:raise AttributeError: A mandatory attribute is missing
:raise ValueError: Invalid attribute value
"""
if isinstance(bundle, Bundle):
# Already a bundle
return bundle
# Bundle name is mandatory
name = bundle.name
if not name:
raise ValueError("A bundle must have a name: {0}".format(bundle))
# Get the filename
for fileattr in ('filename', 'file'):
filename = getattr(bundle, fileattr, None)
if filename:
break
# Normalize bundle properties
properties = getattr(bundle, 'properties', {})
if not isinstance(properties, dict):
properties = {}
# Normalize bundle version
version = getattr(bundle, 'version', None)
if version is not None:
version = str(version)
return Bundle(name, filename, properties, version,
getattr(bundle, 'optional', False))
def load_boot(self, kind):
"""
Loads the boot configuration for the given kind of isolate, or returns
the one in the cache.
:param kind: The kind of isolate to boot
:return: The loaded BootConfiguration object
        :raise IOError: Unknown/inaccessible kind of isolate
:raise KeyError: A parameter is missing in the configuration files
:raise ValueError: Error reading the configuration
"""
# Prepare & store the bean representation
return self.load_boot_dict(self.load_conf_raw('boot', kind))
def load_conf_raw(self, level, kind):
"""
Loads the boot configuration for the given kind of isolate, or returns
the one in the cache.
:param level: The level of configuration (boot, java, python)
:param kind: The kind of isolate to boot
:return: The loaded BootConfiguration object
        :raise IOError: Unknown/inaccessible kind of isolate
:raise KeyError: A parameter is missing in the configuration files
:raise ValueError: Error reading the configuration
"""
# Load the boot file
return self.read('{0}-{1}.js'.format(level, kind))
def load_boot_dict(self, dict_config):
"""
Parses a boot configuration from the given dictionary
:param dict_config: A configuration dictionary
:return: The parsed BootConfiguration object
:raise KeyError: A parameter is missing in the configuration files
:raise ValueError: Error reading the configuration
"""
# Use a copy of environment
environment = {}
json_env = dict_config.get('environment')
if json_env:
environment.update(json_env)
# Parse the properties
properties = {}
dict_properties = dict_config.get('properties')
if dict_properties:
properties.update(dict_properties)
# Prepare the bean representation
bundles = self._parse_bundles(dict_config.get('bundles'))
composition = self._parse_components(dict_config.get('composition'))
return BootConfiguration(bundles=bundles,
composition=composition,
boot_args=dict_config.get('boot_args'),
environment=environment,
properties=properties)
def prepare_isolate(self, uid, name, kind, level, sublevel,
bundles=None, composition=None):
"""
Prepares and returns a configuration dictionary to be stored in the
configuration broker, to start an isolate of the given kind.
:param uid: The isolate UID
:param name: The isolate name
:param kind: The kind of isolate to boot (pelix, osgi, ...)
:param level: The level of configuration (boot, java, python, ...)
:param sublevel: Category of configuration (monitor, isolate, ...)
:param bundles: Extra bundles to install
:param composition: Extra components to instantiate
:return: A configuration dictionary
        :raise IOError: Unknown/inaccessible kind of isolate
:raise KeyError: A parameter is missing in the configuration files
:raise ValueError: Error reading the configuration
"""
# Load the isolate model file
configuration = self.load_conf_raw(level, sublevel)
try:
# Try to load the isolate-specific configuration
# without logging "file not found" errors
isolate_conf = self.read(name + ".js", False)
except IOError:
# Ignore I/O errors (file not found)
# Propagate ValueError (parsing errors)
pass
else:
# Merge the configurations: this method considers that the first
# parameter has priority on the second
configuration = self._reader.merge_object(isolate_conf,
configuration)
# Extend with the boot configuration
return self._prepare_configuration(uid, name, kind,
bundles, composition, configuration)
def read(self, filename, reader_log_error=True):
"""
Reads the content of the given file, without parsing it.
:param filename: A configuration file name
:param reader_log_error: If True, the reader will log I/O errors
:return: The dictionary read from the file
"""
return self._reader.load_file(filename, 'conf',
log_error=reader_log_error)
|
ahmadshahwan/cohorte-runtime
|
python/cohorte/config/parser.py
|
Python
|
apache-2.0
| 14,427
|
# Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from opencensus.common import utils
def _format_attribute_value(value):
if isinstance(value, bool):
value_type = 'bool_value'
elif isinstance(value, int):
value_type = 'int_value'
elif isinstance(value, six.string_types):
value_type = 'string_value'
value = utils.get_truncatable_str(value)
elif isinstance(value, float):
value_type = 'double_value'
else:
return None
return {value_type: value}
class Attributes(object):
"""A set of attributes, each in the format [KEY]:[VALUE].
:type attributes: dict
:param attributes: The set of attributes. Each attribute's key can be up
to 128 bytes long. The value can be a string up to 256
bytes, an integer, a floating-point number, or the
Boolean values true and false.
"""
def __init__(self, attributes=None):
self.attributes = attributes or {}
def set_attribute(self, key, value):
"""Set a key value pair."""
self.attributes[key] = value
def delete_attribute(self, key):
"""Delete an attribute given a key if existed."""
self.attributes.pop(key, None)
def get_attribute(self, key):
"""Get a attribute value."""
return self.attributes.get(key, None)
def format_attributes_json(self):
"""Convert the Attributes object to json format."""
attributes_json = {}
for key, value in self.attributes.items():
key = utils.check_str_length(key)[0]
value = _format_attribute_value(value)
if value is not None:
attributes_json[key] = value
result = {
'attributeMap': attributes_json
}
return result
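# Illustrative sketch (not part of the original module): each typed value
# serializes under its type-specific key, per _format_attribute_value().
def _attributes_example():
    attrs = Attributes({'enabled': True, 'count': 3, 'host': 'example.com'})
    formatted = attrs.format_attributes_json()['attributeMap']
    assert formatted['enabled'] == {'bool_value': True}
    assert formatted['count'] == {'int_value': 3}
    assert 'string_value' in formatted['host']  # strings become truncatable
    return formatted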
|
census-instrumentation/opencensus-python
|
opencensus/trace/attributes.py
|
Python
|
apache-2.0
| 2,383
|
#!/usr/bin/python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import pydot
import os
__author__ = 'Shamal Faily'
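# Illustrative usage (not part of the original script; file names are
# hypothetical):
#   ./at2om.py --context StolenLaptop --author "A. Author" \
#       --out stolen_laptop.xml attack_tree.dot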
def dotToObstacleModel(graph,contextName,originatorName):
goals = []
goalNames = set([])
obstacles = []
acs = {}
for node in graph.get_nodes():
nodeShape = node.get_shape()
nodeStyle = str(node.get_style())
if nodeShape == 'box' and nodeStyle == 'rounded':
obstacles.append(node.get_name())
elif nodeShape == 'box' and nodeStyle == 'None':
nodeName = node.get_name()
if (nodeName != 'node' and nodeName != 'edge'):
goals.append(node.get_name())
goalNames.add(node.get_name())
elif nodeShape == 'triangle':
acs[node.get_name()] = node.get_label()
xmlBuf = '<?xml version="1.0"?>\n<!DOCTYPE cairis_model PUBLIC "-//CAIRIS//DTD MODEL 1.0//EN" "http://cairis.org/dtd/cairis_model.dtd">\n\n<cairis_model>\n\n'
xmlBuf += '<cairis>\n <project_settings name="' + contextName + '">\n <contributors>\n <contributor first_name="None" surname="None" affiliation="' + originatorName + '" role="Scribe" />\n </contributors>\n </project_settings>\n <environment name="' + contextName + '" short_code="' + contextName + '">\n <definition>' + contextName + '</definition>\n <asset_values>\n <none>TBC</none>\n <low>TBC</low>\n <medium>TBC</medium>\n <high>TBC</high>\n </asset_values>\n </environment>\n</cairis>\n\n<goals>\n'
for g in goals:
xmlBuf += ' <goal name=' + g + ' originator="' + originatorName + '">\n <goal_environment name="' + contextName + '" category="Maintain" priority="Medium">\n <definition>' + g + '</definition>\n <fit_criterion>TBC</fit_criterion>\n <issue>None</issue>\n </goal_environment>\n </goal>\n'
for o in obstacles:
xmlBuf += ' <obstacle name=' + o + ' originator="' + originatorName + '">\n <obstacle_environment name="' + contextName + '" category="Threat">\n <definition>' + o + '</definition>\n </obstacle_environment>\n </obstacle>\n'
xmlBuf += '</goals>\n\n'
fromAssocs = []
toAssocs = {}
assocs = []
for e in graph.get_edge_list():
fromName = e.get_source()
toName = e.get_destination()
if fromName in acs:
if fromName not in toAssocs:
toAssocs[fromName] = [toName]
else:
toAssocs[fromName].append(toName)
elif toName in acs:
fromAssocs.append((fromName,toName))
else:
if fromName in goalNames:
assocs.append(' <goal_association environment="' + contextName + '" goal_name=' + fromName + ' goal_dim="goal" ref_type="obstruct" subgoal_name=' + toName + ' subgoal_dim="obstacle" alternative_id="0">\n <rationale>None</rationale>\n </goal_association>\n')
else:
assocs.append(' <goal_association environment="' + contextName + '" goal_name=' + fromName + ' goal_dim="obstacle" ref_type="resolve" subgoal_name=' + toName + ' subgoal_dim="goal" alternative_id="0">\n <rationale>None</rationale>\n </goal_association>\n')
for fromName,toName in fromAssocs:
for subGoalName in toAssocs[toName]:
assocs.append(' <goal_association environment="' + contextName + '" goal_name=' + fromName + ' goal_dim="obstacle" ref_type=' + acs[toName] + ' subgoal_name=' + subGoalName + ' subgoal_dim="obstacle" alternative_id="0">\n <rationale>None</rationale>\n </goal_association>\n')
xmlBuf += '<associations>\n'
for assoc in assocs:
xmlBuf += assoc
xmlBuf += '</associations>\n\n</cairis_model>'
return xmlBuf
def main(args=None):
parser = argparse.ArgumentParser(description='Attack Tree to CAIRIS Model converter')
parser.add_argument('dotFile',help='attack tree model to import (Dot format)')
parser.add_argument('--context',dest='contextName',help='attack context')
parser.add_argument('--author',dest='originatorName',help='author/s')
parser.add_argument('--out',dest='outFile',help='output file (CAIRIS format)')
args = parser.parse_args()
dotInstance = pydot.graph_from_dot_file(args.dotFile)
xmlBuf = dotToObstacleModel(dotInstance[0],args.contextName,args.originatorName)
f = open(args.outFile,'w')
f.write(xmlBuf)
f.close()
if __name__ == '__main__':
main()
|
nathanbjenx/cairis
|
cairis/bin/at2om.py
|
Python
|
apache-2.0
| 5,006
|
# coding=utf-8
#
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IP® Network tunnels module.
REST URI
``http://localhost/mgmt/tm/net/tunnels``
GUI Path
``Network --> tunnels``
REST Kind
``tm:net:tunnels:*``
"""
from f5.bigip.resource import Collection
from f5.bigip.resource import OrganizingCollection
from f5.bigip.resource import Resource
class TunnelS(OrganizingCollection):
"""BIG-IP® network tunnels collection"""
def __init__(self, net):
super(TunnelS, self).__init__(net)
self._meta_data['allowed_lazy_attributes'] = [
Gres,
Tunnels,
Vxlans,
]
class Tunnels(Collection):
"""BIG-IP® network tunnels resource (collection for GRE, Tunnel, VXLANs"""
def __init__(self, tunnelS):
super(Tunnels, self).__init__(tunnelS)
self._meta_data['allowed_lazy_attributes'] = [Gres, Tunnel, Vxlans]
self._meta_data['attribute_registry'] =\
{'tm:net:tunnels:tunnel:tunnelstate': Tunnel}
class Tunnel(Resource):
"""BIG-IP® tunnels tunnel resource"""
def __init__(self, tunnels):
super(Tunnel, self).__init__(tunnels)
self._meta_data['required_creation_parameters'].update(('partition',))
self._meta_data['required_json_kind'] =\
'tm:net:tunnels:tunnel:tunnelstate'
class Gres(Collection):
"""BIG-IP® tunnels GRE sub-collection"""
def __init__(self, tunnels):
super(Gres, self).__init__(tunnels)
self._meta_data['allowed_lazy_attributes'] = [Gre]
self._meta_data['attribute_registry'] =\
{'tm:net:tunnels:gre:grestate': Gre}
class Gre(Resource):
"""BIG-IP® tunnels GRE sub-collection resource"""
def __init__(self, gres):
super(Gre, self).__init__(gres)
self._meta_data['required_creation_parameters'].update(('partition',))
self._meta_data['required_json_kind'] =\
'tm:net:tunnels:gre:grestate'
class Vxlans(Collection):
"""BIG-IP® tunnels VXLAN sub-collection"""
def __init__(self, tunnels):
super(Vxlans, self).__init__(tunnels)
self._meta_data['allowed_lazy_attributes'] = [Vxlan]
self._meta_data['attribute_registry'] =\
{'tm:net:tunnels:vxlan:vxlanstate': Vxlan}
class Vxlan(Resource):
"""BIG-IP® tunnels VXLAN sub-collection resource"""
def __init__(self, vxlans):
super(Vxlan, self).__init__(vxlans)
self._meta_data['required_creation_parameters'].update(('partition',))
self._meta_data['required_json_kind'] =\
'tm:net:tunnels:vxlan:vxlanstate'
|
F5Networks/f5-common-python
|
f5/bigip/tm/net/tunnels.py
|
Python
|
apache-2.0
| 3,153
|
#!/usr/bin/env python
'''
Extract reads which aren't mapped from a SAM or SAM.gz file.
Behavior for PE:
-Write out PE only if both do not map (if either of the pair maps, neither is retained)
Behavior for SE:
-Write out SE if they don't map
Iterate over a SAM or SAM.gz file, take every record where the 3rd and
4th flag bits (0x4 and 0x8) are set to 1, and write the reads out to files.
0x1 template having multiple segments in sequencing
0x2 each segment properly aligned according to the aligner
0x4 segment unmapped
0x8 next segment in the template unmapped
0x10 SEQ being reverse complemented
0x20 SEQ of the next segment in the template being reversed
0x40 the first segment in the template
0x80 the last segment in the template
0x100 secondary alignment
0x200 not passing quality controls
0x400 PCR or optical duplicate
TODO:
1) Add support for retaining both reads if one of a pair doesn't map but the other does
2) Add support for retaining the pair (or SE) if a read maps with low mapq
Note:
It is necessary to double check that both reads of a PE pair really exist in the SAM
file, just in case it somehow gets disordered. This is taken care of by keeping the PE
reads in a set of dictionaries and then deleting them once the pair is written.
In the case where a read is somehow labeled as paired, but the pair doesn't exist, the
read is NOT written.
'''
import sys
import os
from optparse import OptionParser # http://docs.python.org/library/optparse.html
import gzip
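# Illustrative helper, not used by the script below: decode the SAM FLAG bits
# this script cares about. The bit values come from the SAM spec quoted in the
# module docstring; the names are ours.
def _describe_flag(flag):
    bits = [(0x1, 'paired'), (0x4, 'unmapped'), (0x8, 'mate_unmapped'),
            (0x40, 'first_in_pair'), (0x80, 'last_in_pair'), (0x100, 'secondary')]
    return [name for bit, name in bits if flag & bit]
# e.g. _describe_flag(77) -> ['paired', 'unmapped', 'mate_unmapped', 'first_in_pair']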
usage = "usage: %prog [options] -o output_base inputfile.SAM"
parser = OptionParser(usage=usage, version="%prog 2.0.1")
parser.add_option('-u', '--uncompressed', help="leave output files uncompressed",
action="store_true", dest="uncompressed")
parser.add_option('-o', '--output_base', help="output file basename",
action="store", type="str", dest="output_base", default="screened")
parser.add_option('-t', '--tab-seperated', help="write output as tab-separated records instead of fastq",
                  action="store_true", dest="tabSeperated")
parser.add_option('-v', '--verbose', help="verbose output",
                  action="store_true", dest="verbose", default=False)
(options, args) = parser.parse_args()
if len(args) == 1:
infile = args[0]
# Start opening input/output files:
if not os.path.exists(infile):
print >> sys.stderr, "Error, can't find input file %s" % infile
sys.exit()
if infile.split(".")[-1] == "gz":
insam = gzip.open(infile, 'rb')
else:
insam = open(infile, 'r')
else:
# reading from stdin
insam = sys.stdin
base = options.output_base
PE1 = {}
PE2 = {}
contig_map = {}
interleaved = False
def writeread(ID, r1, r2):
if interleaved:
if options.tabSeperated is True:
# read1
print ID + "\t" + r1[0] + "\t" + r1[1] + "\t" + r2[0] + "\t" + r2[1] + "\n"
else:
print "@" + ID + "#0/1"
print r1[0]
print '+\n' + r1[1]
# read2
print "@" + ID + "#0/2"
print r2[0]
print '+\n' + r2[1]
else:
# read1
outPE1.write("@" + ID + "#0/1" '\n')
outPE1.write(r1[0] + '\n')
outPE1.write('+\n' + r1[1] + '\n')
# read2
outPE2.write("@" + ID + "#0/2" '\n')
outPE2.write(r2[0] + '\n')
outPE2.write('+\n' + r2[1] + '\n')
i = 0
PE_written = 0
SE_written = 0
SE_open = False
PE_open = False
line2 = []
for line in insam:
# Comment/header lines start with @
if line[0] != "@" and len(line.strip().split()) > 2:
line2 = line.strip().split()
flag = int(line2[1])
if (flag & 0x100): # secondary alignment
continue
i += 1
# Handle SE:
        # unmapped SE reads have 0x1 set to 0, and 0x4 (third bit) set to 1
if (flag & 0x1 == 0) and (flag & 0x4):
ID = line2[0].split("#")[0]
if not SE_open:
if base == "stdout":
interleaved = True
elif options.uncompressed:
outSE = open(base + "_SE.fastq", 'w')
else:
outSE = gzip.open(base + "_SE.fastq.gz", 'wb')
SE_open = True
# interleaved just means to stdout in this case
if (interleaved):
if options.tabSeperated is True:
print ID + "\t" + line2[9] + "\t" + line2[10] + "\n"
else:
print "@" + ID
print line2[9]
print '+\n' + line2[10]
else:
outSE.write("@" + ID + '\n')
outSE.write(line2[9] + '\n')
outSE.write('+\n' + line2[10] + '\n')
SE_written += 1
continue
# Handle PE:
        # logic: 0x1 = multiple segments in sequencing, 0x4 = segment unmapped, 0x8 = next segment unmapped; 0x40/0x80 (first/last segment in template) are used below to tell PE1 from PE2
if ((flag & 0x1) and (flag & 0x4) and (flag & 0x8)):
if not PE_open:
if base == "stdout":
interleaved = True
elif options.uncompressed:
outPE1 = open(base + "_PE1.fastq", 'w')
outPE2 = open(base + "_PE2.fastq", 'w')
else:
outPE1 = gzip.open(base + "_PE1.fastq.gz", 'wb')
outPE2 = gzip.open(base + "_PE2.fastq.gz", 'wb')
PE_open = True
if (flag & 0x40): # is this PE1 (first segment in template)
# PE1 read, check that PE2 is in dict and write out
ID = line2[0].split("#")[0]
r1 = [line2[9], line2[10]] # sequence + qual
if ID in PE2:
writeread(ID, r1, PE2[ID])
del PE2[ID]
PE_written += 1
else:
PE1[ID] = r1
continue
elif (flag & 0x80): # is this PE2 (last segment in template)
# PE2 read, check that PE1 is in dict and write out
ID = line2[0].split("#")[0]
r2 = [line2[9], line2[10]]
if ID in PE1:
writeread(ID, PE1[ID], r2)
del PE1[ID]
PE_written += 1
else:
PE2[ID] = r2
continue
# was mapped, count it up
# if line2 != []:
# contig = line2[2]
# if contig in contig_map.keys():
# if (flag & 0x1 == 0): # SE
# contig_map[contig]["SE"] += 1
# elif (flag & 0x40): # PE, Just count the first in the pair
# contig_map[contig]["PE"] += 1
# else:
# contig_map[contig] = {}
# if (flag & 0x1 == 0): # SE
# contig_map[contig]["SE"] = 1
# contig_map[contig]["PE"] = 0
# elif (flag & 0x40): # PE, Just count the first in the pair
# contig_map[contig]["SE"] = 0
# contig_map[contig]["PE"] = 1
# for k in contig_map.keys():
# print >> sys.stderr, "\tFound %s: percent: %.2f, PE mapped: %s, SE mapped: %s" % (k, (2*PE_written+SE_written)/i, contig_map[k]["PE"], contig_map[k]["SE"])
print >> sys.stderr, "Records processed: %s | PE_written: %s | SE_written: %s | Discarded: %s " % (i, PE_written, SE_written, i-(PE_written*2+SE_written))
if base != "stdout":
if PE_open:
outPE1.close()
outPE2.close()
if SE_open:
outSE.close()
|
msettles/expHTS
|
expHTS/extract_unmapped_reads.py
|
Python
|
apache-2.0
| 7,685
|
from ionotomo import *
import numpy as np
import pylab as plt
def test_turbulent_realisation(plot=True):
xvec = np.linspace(-100,100,100)
zvec = np.linspace(0,1000,1000)
M = np.zeros([100,100,1000])
TCI = TriCubic(xvec,xvec,zvec,M)
print("Matern 1/2 kernel")
cov_obj = Covariance(tci=TCI)
sigma = 1.
corr = 30.
nu = 1./2.
print("Testing spectral density")
B = cov_obj.realization()
print("Fluctuations measured {}".format((np.percentile(B.flatten(),95) + np.percentile(-B.flatten(),95))))
#xy slice
x = TCI.xvec
y = TCI.yvec
z = TCI.zvec
X,Y,Z = np.meshgrid(x,y,z,indexing='ij')
dx = x[1] - x[0]
dy = y[1] - y[0]
dz = z[1] - z[0]
    if plot:
f = plt.figure(figsize=(8,4))
vmin = np.min(B)
vmax = np.max(B)
ax = f.add_subplot(1,3,1)
ax.imshow(B[49,:,:],extent=(z[0],z[-1],y[0],y[-1]),vmin=vmin,vmax=vmax)
ax = f.add_subplot(1,3,2)
plt.imshow(B[:,49,:],extent=(z[0],z[-1],x[0],x[-1]),vmin=vmin,vmax=vmax)
ax = f.add_subplot(1,3,3)
im = plt.imshow(B[:,:,499],extent=(y[0],y[-1],x[0],x[-1]),vmin=vmin,vmax=vmax)
plt.colorbar(im)
plt.show()
print("testing contraction C^{-1}.phi")
phi = np.zeros_like(TCI.M)
#phi = np.cos(R*4)*np.exp(-R)
phi = X**2 + Y**2 + Z**4
phihat = cov_obj.contract(phi)
assert not np.any(np.isnan(phihat))
    #Analytic for exp covariance is 1/(8*np.pi*sigma**2) * (1/L**3 * phi - 2/L * Lap phi + L * Lap Lap phi)
    #For phi = X**2 + Y**2 + Z**4: Lap phi = 2 + 2 + 12*Z**2 and Lap Lap phi = 24
    phih = 1./(8*np.pi*sigma**2) * ( 1./corr**3 * phi - 2./corr *(2 + 2 + 12*Z**2) + corr*24)
if plot:
f = plt.figure(figsize=(12,12))
ax = f.add_subplot(3,3,1)
ax.set_title("phi")
im = ax.imshow(phi[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,2)
ax.set_title("FFT based")
im = plt.imshow(phihat[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,3)
ax.set_title("Analytic")
im = plt.imshow(phih[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,4)
im = ax.imshow(phi[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,5)
im = plt.imshow(phihat[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,6)
im = plt.imshow(phih[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,7)
im = ax.imshow(phi[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,8)
im = plt.imshow(phihat[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,9)
im = plt.imshow(phih[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
plt.tight_layout()
plt.show()
    # early exit: the stencil-based cross-check below is currently disabled
    return
phih = phi.copy()/corr**3
from scipy import ndimage
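    # Descriptive note: the loop below builds a 3-D 7-point Laplacian stencil.
    # s counts how many of (i, j, k) are zero, so s == 3 is the centre voxel
    # (weight -6) and s == 2 are the six face neighbours (weight +1);
    # edge and corner entries stay 0.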
stencil = np.zeros([3,3,3])
for i in range(-1,2):
for j in range(-1,2):
for k in range(-1,2):
s = 0
if i == 0:
s += 1
if j == 0:
s += 1
if k == 0:
s += 1
if s == 3:
stencil[i,j,k] = -2*3.
if s == 3 - 1:
stencil[i,j,k] = 1.
stencil /= (dx*dy*dz)**(2./3.)
lap = ndimage.convolve(phi,stencil,mode='wrap')
phih -= 2/corr*lap
laplap = ndimage.convolve(lap,stencil,mode='wrap')
phih += corr*laplap
phih /= 8*np.pi*sigma**2
if plot:
f = plt.figure(figsize=(12,12))
ax = f.add_subplot(3,3,1)
ax.set_title("phi")
im = ax.imshow(phi[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,2)
ax.set_title("FFT based")
im = plt.imshow(phihat[50,:,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,3)
ax.set_title("Analytic")
im = plt.imshow(phih[50,:,:],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,4)
im = ax.imshow(phi[:,20,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,5)
im = plt.imshow(phihat[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,6)
im = plt.imshow(phih[:,20,:],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,7)
im = ax.imshow(phi[:,:,70],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,8)
im = plt.imshow(phihat[:,:,70],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,9)
im = plt.imshow(phih[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
plt.show()
|
Joshuaalbert/IonoTomo
|
src/ionotomo/tests/test_turbulent_realisation.py
|
Python
|
apache-2.0
| 5,260
|
# Copyright (c) 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.test import attr
from tempest_lib import exceptions
from murano_tempest_tests.tests.api.application_catalog import base
from murano_tempest_tests import utils
class TestEnvironmentsNegative(base.BaseApplicationCatalogTest):
@attr(type='negative')
def test_delete_environment_with_wrong_env_id(self):
self.assertRaises(exceptions.NotFound,
self.application_catalog_client.delete_environment,
utils.generate_uuid())
@attr(type='negative')
def test_double_delete_environment(self):
        name = utils.generate_name('double_del_negative')
environment = self.application_catalog_client.\
create_environment(name)
self.application_catalog_client.delete_environment(environment['id'])
self.assertRaises(exceptions.NotFound,
self.application_catalog_client.delete_environment,
environment['id'])
@attr(type='negative')
def test_get_deleted_environment(self):
        name = utils.generate_name('get_deleted_negative')
environment = self.application_catalog_client.\
create_environment(name)
self.application_catalog_client.delete_environment(environment['id'])
self.assertRaises(exceptions.NotFound,
self.application_catalog_client.get_environment,
environment['id'])
class TestEnvironmentNegativeTenantIsolation(base.BaseApplicationCatalogTest):
@classmethod
def resource_setup(cls):
super(TestEnvironmentNegativeTenantIsolation, cls).resource_setup()
name = utils.generate_name(cls.__name__)
cls.environment = cls.application_catalog_client.\
create_environment(name)
cls.alt_client = cls.get_client_with_isolated_creds(
type_of_creds='alt')
@classmethod
def resource_cleanup(cls):
cls.application_catalog_client.\
delete_environment(cls.environment['id'])
super(TestEnvironmentNegativeTenantIsolation, cls).resource_cleanup()
@attr(type='negative')
def test_get_environment_from_another_tenant(self):
self.assertRaises(exceptions.Forbidden,
self.alt_client.get_environment,
self.environment['id'])
@attr(type='negative')
def test_update_environment_from_another_tenant(self):
self.assertRaises(exceptions.Forbidden,
self.alt_client.update_environment,
self.environment['id'])
@attr(type='negative')
def test_delete_environment_from_another_tenant(self):
self.assertRaises(exceptions.Forbidden,
self.alt_client.delete_environment,
self.environment['id'])
|
olivierlemasle/murano
|
murano_tempest_tests/tests/api/application_catalog/test_environments_negative.py
|
Python
|
apache-2.0
| 3,454
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..images.get import get_image
from ..images.list import list_images
def test_list_images():
images = list_images("debian-cloud")
for img in images:
assert img.kind == "compute#image"
assert img.self_link.startswith(
"https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/"
)
def test_get_image():
images = list_images("debian-cloud")
image = next(iter(images))
image2 = get_image("debian-cloud", image.name)
assert image == image2
|
googleapis/python-compute
|
samples/snippets/tests/test_sample_images.py
|
Python
|
apache-2.0
| 1,097
|
from functools import wraps
from itertools import chain
from django.db.models import Prefetch, Q
from django.urls import Resolver404, resolve
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from rest_framework.authentication import SessionAuthentication
from rest_framework.decorators import action
from rest_framework.exceptions import NotFound, ParseError, PermissionDenied, ValidationError
from rest_framework.generics import get_object_or_404
from rest_framework.response import Response
from rest_framework.viewsets import ReadOnlyModelViewSet, ViewSet
from shapely import prepared
from shapely.ops import cascaded_union
from c3nav.api.utils import get_api_post_data
from c3nav.editor.forms import ChangeSetForm, RejectForm
from c3nav.editor.models import ChangeSet
from c3nav.editor.utils import LevelChildEditUtils, SpaceChildEditUtils
from c3nav.editor.views.base import etag_func
from c3nav.mapdata.api import api_etag
from c3nav.mapdata.models import Area, MapUpdate, Source
from c3nav.mapdata.models.geometry.space import POI
from c3nav.mapdata.utils.user import can_access_editor
class EditorViewSetMixin(ViewSet):
def initial(self, request, *args, **kwargs):
if not can_access_editor(request):
raise PermissionDenied
return super().initial(request, *args, **kwargs)
def api_etag_with_update_cache_key(**outkwargs):
outkwargs.setdefault('cache_kwargs', {})['update_cache_key_match'] = bool
def wrapper(func):
func = api_etag(**outkwargs)(func)
@wraps(func)
def wrapped_func(self, request, *args, **kwargs):
try:
changeset = request.changeset
except AttributeError:
changeset = ChangeSet.get_for_request(request)
request.changeset = changeset
update_cache_key = request.changeset.raw_cache_key_without_changes
update_cache_key_match = request.GET.get('update_cache_key') == update_cache_key
return func(self, request, *args,
update_cache_key=update_cache_key, update_cache_key_match=update_cache_key_match,
**kwargs)
return wrapped_func
return wrapper
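# Illustrative use of the decorator factory above (it mirrors the `geometries`
# action below; `etag_func` and the injected parameter names are the real ones
# used in this module, the view name is made up):
#
#   @api_etag_with_update_cache_key(etag_func=etag_func, cache_parameters={'level': str})
#   def my_view(self, request, update_cache_key, update_cache_key_match, *args, **kwargs):
#       ...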
class EditorViewSet(EditorViewSetMixin, ViewSet):
"""
Editor API
    /geometries/ returns a list of geojson features; you have to specify ?level=<id> or ?space=<id>
    /geometrystyles/ returns styling information for all geometry types
    /bounds/ returns the maximum bounds of the map
    /{path}/ insert an editor path to get an API representation of it. POST requests on forms are possible as well.
"""
lookup_field = 'path'
lookup_value_regex = r'.+'
@staticmethod
def _get_level_geometries(level):
buildings = level.buildings.all()
buildings_geom = cascaded_union([building.geometry for building in buildings])
spaces = {space.pk: space for space in level.spaces.all()}
holes_geom = []
for space in spaces.values():
if space.outside:
space.geometry = space.geometry.difference(buildings_geom)
columns = [column.geometry for column in space.columns.all()]
if columns:
columns_geom = cascaded_union([column.geometry for column in space.columns.all()])
space.geometry = space.geometry.difference(columns_geom)
holes = [hole.geometry for hole in space.holes.all()]
if holes:
space_holes_geom = cascaded_union(holes)
holes_geom.append(space_holes_geom.intersection(space.geometry))
space.geometry = space.geometry.difference(space_holes_geom)
for building in buildings:
building.original_geometry = building.geometry
if holes_geom:
holes_geom = cascaded_union(holes_geom)
holes_geom_prep = prepared.prep(holes_geom)
for obj in buildings:
if holes_geom_prep.intersects(obj.geometry):
obj.geometry = obj.geometry.difference(holes_geom)
results = []
results.extend(buildings)
for door in level.doors.all():
results.append(door)
results.extend(spaces.values())
return results
@staticmethod
def _get_levels_pk(request, level):
# noinspection PyPep8Naming
Level = request.changeset.wrap_model('Level')
levels_under = ()
levels_on_top = ()
lower_level = level.lower(Level).first()
primary_levels = (level,) + ((lower_level,) if lower_level else ())
secondary_levels = Level.objects.filter(on_top_of__in=primary_levels).values_list('pk', 'on_top_of')
if lower_level:
levels_under = tuple(pk for pk, on_top_of in secondary_levels if on_top_of == lower_level.pk)
        levels_on_top = tuple(pk for pk, on_top_of in secondary_levels if on_top_of == level.pk)
levels = chain([level.pk], levels_under, levels_on_top)
return levels, levels_on_top, levels_under
@staticmethod
def area_sorting_func(area):
groups = tuple(area.groups.all())
if not groups:
            return (0, 0, 0, 0)  # same length as the tuple below, so comparisons stay consistent
return (1, groups[0].category.priority, groups[0].hierarchy, groups[0].priority)
# noinspection PyPep8Naming
@action(detail=False, methods=['get'])
@api_etag_with_update_cache_key(etag_func=etag_func, cache_parameters={'level': str, 'space': str})
def geometries(self, request, update_cache_key, update_cache_key_match, *args, **kwargs):
Level = request.changeset.wrap_model('Level')
Space = request.changeset.wrap_model('Space')
Column = request.changeset.wrap_model('Column')
Hole = request.changeset.wrap_model('Hole')
AltitudeMarker = request.changeset.wrap_model('AltitudeMarker')
Building = request.changeset.wrap_model('Building')
Door = request.changeset.wrap_model('Door')
LocationGroup = request.changeset.wrap_model('LocationGroup')
WifiMeasurement = request.changeset.wrap_model('WifiMeasurement')
level = request.GET.get('level')
space = request.GET.get('space')
if level is not None:
if space is not None:
raise ValidationError('Only level or space can be specified.')
level = get_object_or_404(Level.objects.filter(Level.q_for_request(request)), pk=level)
edit_utils = LevelChildEditUtils(level, request)
if not edit_utils.can_access_child_base_mapdata:
raise PermissionDenied
levels, levels_on_top, levels_under = self._get_levels_pk(request, level)
# don't prefetch groups for now as changesets do not yet work with m2m-prefetches
levels = Level.objects.filter(pk__in=levels).filter(Level.q_for_request(request))
# graphnodes_qs = request.changeset.wrap_model('GraphNode').objects.all()
levels = levels.prefetch_related(
Prefetch('spaces', Space.objects.filter(Space.q_for_request(request)).only(
'geometry', 'level', 'outside'
)),
Prefetch('doors', Door.objects.filter(Door.q_for_request(request)).only('geometry', 'level')),
Prefetch('spaces__columns', Column.objects.filter(
Q(access_restriction__isnull=True) | ~Column.q_for_request(request)
).only('geometry', 'space')),
Prefetch('spaces__groups', LocationGroup.objects.only(
'color', 'category', 'priority', 'hierarchy', 'category__priority', 'category__allow_spaces'
)),
Prefetch('buildings', Building.objects.only('geometry', 'level')),
Prefetch('spaces__holes', Hole.objects.only('geometry', 'space')),
Prefetch('spaces__altitudemarkers', AltitudeMarker.objects.only('geometry', 'space')),
Prefetch('spaces__wifi_measurements', WifiMeasurement.objects.only('geometry', 'space')),
# Prefetch('spaces__graphnodes', graphnodes_qs)
)
levels = {s.pk: s for s in levels}
level = levels[level.pk]
levels_under = [levels[pk] for pk in levels_under]
levels_on_top = [levels[pk] for pk in levels_on_top]
# todo: permissions
# graphnodes = tuple(chain(*(space.graphnodes.all()
# for space in chain(*(level.spaces.all() for level in levels.values())))))
# graphnodes_lookup = {node.pk: node for node in graphnodes}
# graphedges = request.changeset.wrap_model('GraphEdge').objects.all()
# graphedges = graphedges.filter(Q(from_node__in=graphnodes) | Q(to_node__in=graphnodes))
# graphedges = graphedges.select_related('waytype')
# this is faster because we only deserialize graphnode geometries once
# missing_graphnodes = graphnodes_qs.filter(pk__in=set(chain(*((edge.from_node_id, edge.to_node_id)
# for edge in graphedges))))
# graphnodes_lookup.update({node.pk: node for node in missing_graphnodes})
# for edge in graphedges:
# edge._from_node_cache = graphnodes_lookup[edge.from_node_id]
# edge._to_node_cache = graphnodes_lookup[edge.to_node_id]
# graphedges = [edge for edge in graphedges if edge.from_node.space_id != edge.to_node.space_id]
results = chain(
*(self._get_level_geometries(l) for l in levels_under),
self._get_level_geometries(level),
*(self._get_level_geometries(l) for l in levels_on_top),
*(space.altitudemarkers.all() for space in level.spaces.all()),
*(space.wifi_measurements.all() for space in level.spaces.all())
# graphedges,
# graphnodes,
)
elif space is not None:
space_q_for_request = Space.q_for_request(request)
qs = Space.objects.filter(space_q_for_request)
space = get_object_or_404(qs.select_related('level', 'level__on_top_of'), pk=space)
level = space.level
edit_utils = SpaceChildEditUtils(space, request)
if not edit_utils.can_access_child_base_mapdata:
raise PermissionDenied
if request.user_permissions.can_access_base_mapdata:
doors = [door for door in level.doors.filter(Door.q_for_request(request)).all()
if door.geometry.intersects(space.geometry)]
doors_space_geom = cascaded_union([door.geometry for door in doors]+[space.geometry])
levels, levels_on_top, levels_under = self._get_levels_pk(request, level.primary_level)
if level.on_top_of_id is not None:
levels = chain([level.pk], levels_on_top)
other_spaces = Space.objects.filter(space_q_for_request, level__pk__in=levels).only(
'geometry', 'level'
).prefetch_related(
Prefetch('groups', LocationGroup.objects.only(
'color', 'category', 'priority', 'hierarchy', 'category__priority', 'category__allow_spaces'
).filter(color__isnull=False))
)
space = next(s for s in other_spaces if s.pk == space.pk)
other_spaces = [s for s in other_spaces
if s.geometry.intersects(doors_space_geom) and s.pk != space.pk]
all_other_spaces = other_spaces
if level.on_top_of_id is None:
other_spaces_lower = [s for s in other_spaces if s.level_id in levels_under]
other_spaces_upper = [s for s in other_spaces if s.level_id in levels_on_top]
else:
other_spaces_lower = [s for s in other_spaces if s.level_id == level.on_top_of_id]
other_spaces_upper = []
other_spaces = [s for s in other_spaces if s.level_id == level.pk]
space.bounds = True
# deactivated for performance reasons
buildings = level.buildings.all()
# buildings_geom = cascaded_union([building.geometry for building in buildings])
# for other_space in other_spaces:
# if other_space.outside:
# other_space.geometry = other_space.geometry.difference(buildings_geom)
for other_space in chain(other_spaces, other_spaces_lower, other_spaces_upper):
other_space.opacity = 0.4
other_space.color = '#ffffff'
for building in buildings:
building.opacity = 0.5
else:
buildings = []
doors = []
other_spaces = []
other_spaces_lower = []
other_spaces_upper = []
all_other_spaces = []
# todo: permissions
if request.user_permissions.can_access_base_mapdata:
graphnodes = request.changeset.wrap_model('GraphNode').objects.all()
graphnodes = graphnodes.filter((Q(space__in=all_other_spaces)) | Q(space__pk=space.pk))
space_graphnodes = tuple(node for node in graphnodes if node.space_id == space.pk)
graphedges = request.changeset.wrap_model('GraphEdge').objects.all()
space_graphnodes_ids = tuple(node.pk for node in space_graphnodes)
graphedges = graphedges.filter(Q(from_node__pk__in=space_graphnodes_ids) |
Q(to_node__pk__in=space_graphnodes_ids))
graphedges = graphedges.select_related('from_node', 'to_node', 'waytype').only(
'from_node__geometry', 'to_node__geometry', 'waytype__color'
)
else:
graphnodes = []
graphedges = []
areas = space.areas.filter(Area.q_for_request(request)).only(
'geometry', 'space'
).prefetch_related(
Prefetch('groups', LocationGroup.objects.order_by(
'-category__priority', '-hierarchy', '-priority'
).only(
'color', 'category', 'priority', 'hierarchy', 'category__priority', 'category__allow_areas'
))
)
for area in areas:
area.opacity = 0.5
areas = sorted(areas, key=self.area_sorting_func)
results = chain(
buildings,
other_spaces_lower,
doors,
other_spaces,
[space],
areas,
space.holes.all().only('geometry', 'space'),
space.stairs.all().only('geometry', 'space'),
space.ramps.all().only('geometry', 'space'),
space.obstacles.all().only('geometry', 'space', 'color'),
space.lineobstacles.all().only('geometry', 'width', 'space', 'color'),
space.columns.all().only('geometry', 'space'),
space.altitudemarkers.all().only('geometry', 'space'),
space.wifi_measurements.all().only('geometry', 'space'),
space.pois.filter(POI.q_for_request(request)).only('geometry', 'space').prefetch_related(
Prefetch('groups', LocationGroup.objects.only(
'color', 'category', 'priority', 'hierarchy', 'category__priority', 'category__allow_pois'
).filter(color__isnull=False))
),
other_spaces_upper,
graphedges,
graphnodes
)
else:
raise ValidationError('No level or space specified.')
return Response(list(chain(
[('update_cache_key', update_cache_key)],
(self.conditional_geojson(obj, update_cache_key_match) for obj in results)
)))
def conditional_geojson(self, obj, update_cache_key_match):
if update_cache_key_match and not obj._affected_by_changeset:
return obj.get_geojson_key()
result = obj.to_geojson(instance=obj)
result['properties']['changed'] = obj._affected_by_changeset
return result
@action(detail=False, methods=['get'])
@api_etag(etag_func=MapUpdate.current_cache_key, cache_parameters={})
def geometrystyles(self, request, *args, **kwargs):
return Response({
'building': '#aaaaaa',
'space': '#eeeeee',
'hole': 'rgba(255, 0, 0, 0.3)',
'door': '#ffffff',
'area': '#55aaff',
'stair': '#a000a0',
'ramp': 'rgba(160, 0, 160, 0.2)',
'obstacle': '#999999',
'lineobstacle': '#999999',
'column': 'rgba(0, 0, 50, 0.3)',
'poi': '#4488cc',
'shadow': '#000000',
'graphnode': '#009900',
'graphedge': '#00CC00',
'altitudemarker': '#0000FF',
'wifimeasurement': '#DDDD00',
})
@action(detail=False, methods=['get'])
@api_etag(etag_func=etag_func, cache_parameters={})
def bounds(self, request, *args, **kwargs):
return Response({
'bounds': Source.max_bounds(),
})
def __getattr__(self, name):
# allow POST and DELETE methods for the editor API
if getattr(self, 'get', None).__name__ in ('list', 'retrieve'):
if name == 'post' and (self.resolved.url_name.endswith('.create') or
self.resolved.url_name.endswith('.edit')):
return self.post_or_delete
if name == 'delete' and self.resolved.url_name.endswith('.edit'):
return self.post_or_delete
raise AttributeError
def post_or_delete(self, request, *args, **kwargs):
# django-rest-framework doesn't automatically do this for logged out requests
SessionAuthentication().enforce_csrf(request)
return self.retrieve(request, *args, **kwargs)
def list(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
@cached_property
def resolved(self):
resolved = None
path = self.kwargs.get('path', '')
if path:
try:
resolved = resolve('/editor/'+path+'/')
except Resolver404:
pass
if not resolved:
try:
resolved = resolve('/editor/'+path)
except Resolver404:
pass
self.request.sub_resolver_match = resolved
return resolved
def retrieve(self, request, *args, **kwargs):
resolved = self.resolved
if not resolved:
raise NotFound(_('No matching editor view endpoint found.'))
if not getattr(resolved.func, 'api_hybrid', False):
raise NotFound(_('Matching editor view point does not provide an API.'))
get_api_post_data(request)
response = resolved.func(request, api=True, *resolved.args, **resolved.kwargs)
return response
class ChangeSetViewSet(EditorViewSetMixin, ReadOnlyModelViewSet):
"""
List and manipulate changesets. All lists are ordered by last update descending. Use ?offset= to specify an offset.
Don't forget to set X-Csrftoken for POST requests!
/ lists all changesets this user can see.
/user/ lists changesets by this user
/reviewing/ lists changesets this user is currently reviewing.
/pending_review/ lists changesets this user can review.
/current/ returns the current changeset.
/direct_editing/ POST to activate direct editing (if available).
    /deactivate/ POST to deactivate current changeset or deactivate direct editing
/{id}/changes/ list all changes of a given changeset.
/{id}/activate/ POST to activate given changeset.
/{id}/edit/ POST to edit given changeset (provide title and description in POST data).
/{id}/restore_object/ POST to restore an object deleted by this changeset (provide change id as id in POST data).
/{id}/delete/ POST to delete given changeset.
/{id}/propose/ POST to propose given changeset.
/{id}/unpropose/ POST to unpropose given changeset.
/{id}/review/ POST to review given changeset.
/{id}/reject/ POST to reject given changeset (provide reject=1 in POST data for final rejection).
/{id}/unreject/ POST to unreject given changeset.
/{id}/apply/ POST to accept and apply given changeset.
"""
queryset = ChangeSet.objects.all()
def get_queryset(self):
return ChangeSet.qs_for_request(self.request).select_related('last_update', 'last_state_update', 'last_change')
def _list(self, request, qs):
offset = 0
if 'offset' in request.GET:
if not request.GET['offset'].isdigit():
raise ParseError('offset has to be a positive integer.')
offset = int(request.GET['offset'])
return Response([obj.serialize() for obj in qs.order_by('-last_update')[offset:offset+20]])
def list(self, request, *args, **kwargs):
return self._list(request, self.get_queryset())
@action(detail=False, methods=['get'])
def user(self, request, *args, **kwargs):
return self._list(request, self.get_queryset().filter(author=request.user))
@action(detail=False, methods=['get'])
def reviewing(self, request, *args, **kwargs):
return self._list(request, self.get_queryset().filter(
assigned_to=request.user, state='review'
))
@action(detail=False, methods=['get'])
def pending_review(self, request, *args, **kwargs):
return self._list(request, self.get_queryset().filter(
state__in=('proposed', 'reproposed'),
))
def retrieve(self, request, *args, **kwargs):
return Response(self.get_object().serialize())
@action(detail=False, methods=['get'])
def current(self, request, *args, **kwargs):
changeset = ChangeSet.get_for_request(request)
return Response({
'direct_editing': changeset.direct_editing,
'changeset': changeset.serialize() if changeset.pk else None,
})
@action(detail=False, methods=['post'])
def direct_editing(self, request, *args, **kwargs):
# django-rest-framework doesn't automatically do this for logged out requests
SessionAuthentication().enforce_csrf(request)
if not ChangeSet.can_direct_edit(request):
raise PermissionDenied(_('You don\'t have the permission to activate direct editing.'))
changeset = ChangeSet.get_for_request(request)
if changeset.pk is not None:
raise PermissionDenied(_('You cannot activate direct editing if you have an active changeset.'))
request.session['direct_editing'] = True
return Response({
'success': True,
})
@action(detail=False, methods=['post'])
def deactivate(self, request, *args, **kwargs):
# django-rest-framework doesn't automatically do this for logged out requests
SessionAuthentication().enforce_csrf(request)
request.session.pop('changeset', None)
request.session['direct_editing'] = False
return Response({
'success': True,
})
@action(detail=True, methods=['get'])
def changes(self, request, *args, **kwargs):
changeset = self.get_object()
changeset.fill_changes_cache()
return Response([obj.serialize() for obj in changeset.iter_changed_objects()])
@action(detail=True, methods=['post'])
def activate(self, request, *args, **kwargs):
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.can_activate(request):
                raise PermissionDenied(_('You cannot activate this change set.'))
changeset.activate(request)
return Response({'success': True})
@action(detail=True, methods=['post'])
def edit(self, request, *args, **kwargs):
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.can_edit(request):
raise PermissionDenied(_('You cannot edit this change set.'))
form = ChangeSetForm(instance=changeset, data=get_api_post_data(request))
if not form.is_valid():
raise ParseError(form.errors)
changeset = form.instance
update = changeset.updates.create(user=request.user,
title=changeset.title, description=changeset.description)
changeset.last_update = update
changeset.save()
return Response({'success': True})
@action(detail=True, methods=['post'])
def restore_object(self, request, *args, **kwargs):
data = get_api_post_data(request)
if 'id' not in data:
raise ParseError('Missing id.')
restore_id = data['id']
if isinstance(restore_id, str) and restore_id.isdigit():
restore_id = int(restore_id)
if not isinstance(restore_id, int):
raise ParseError('id needs to be an integer.')
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.can_edit(request):
                raise PermissionDenied(_('You cannot edit changes on this change set.'))
try:
changed_object = changeset.changed_objects_set.get(pk=restore_id)
except Exception:
raise NotFound('could not find object.')
try:
changed_object.restore()
except PermissionError:
                raise PermissionDenied(_('You cannot restore this object, because it depends on '
                                         'a deleted object or it would violate a unique constraint.'))
return Response({'success': True})
@action(detail=True, methods=['post'])
def propose(self, request, *args, **kwargs):
if not request.user.is_authenticated:
raise PermissionDenied(_('You need to log in to propose changes.'))
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.title or not changeset.description:
                raise PermissionDenied(_('You need to add a title and a description to propose this change set.'))
if not changeset.can_propose(request):
raise PermissionDenied(_('You cannot propose this change set.'))
changeset.propose(request.user)
return Response({'success': True})
@action(detail=True, methods=['post'])
def unpropose(self, request, *args, **kwargs):
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.can_unpropose(request):
raise PermissionDenied(_('You cannot unpropose this change set.'))
changeset.unpropose(request.user)
return Response({'success': True})
@action(detail=True, methods=['post'])
def review(self, request, *args, **kwargs):
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.can_start_review(request):
raise PermissionDenied(_('You cannot review these changes.'))
changeset.start_review(request.user)
return Response({'success': True})
@action(detail=True, methods=['post'])
def reject(self, request, *args, **kwargs):
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
            if not changeset.can_end_review(request):
raise PermissionDenied(_('You cannot reject these changes.'))
form = RejectForm(get_api_post_data(request))
if not form.is_valid():
raise ParseError(form.errors)
changeset.reject(request.user, form.cleaned_data['comment'], form.cleaned_data['final'])
return Response({'success': True})
@action(detail=True, methods=['post'])
def unreject(self, request, *args, **kwargs):
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.can_unreject(request):
raise PermissionDenied(_('You cannot unreject these changes.'))
changeset.unreject(request.user)
return Response({'success': True})
@action(detail=True, methods=['post'])
def apply(self, request, *args, **kwargs):
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.can_end_review(request):
raise PermissionDenied(_('You cannot accept and apply these changes.'))
changeset.apply(request.user)
return Response({'success': True})
@action(detail=True, methods=['post'])
def delete(self, request, *args, **kwargs):
changeset = self.get_object()
with changeset.lock_to_edit(request) as changeset:
if not changeset.can_delete(request):
raise PermissionDenied(_('You cannot delete this change set.'))
changeset.delete()
return Response({'success': True})
|
c3nav/c3nav
|
src/c3nav/editor/api.py
|
Python
|
apache-2.0
| 29,810
|