# repo: googleapis/googleapis-gen | ref: refs/heads/master
# path: google/cloud/vision/v1p3beta1/vision-v1p3beta1-py/google/cloud/vision_v1p3beta1/services/product_search/transports/grpc_asyncio.py
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.vision_v1p3beta1.types import product_search_service
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import ProductSearchTransport, DEFAULT_CLIENT_INFO
from .grpc import ProductSearchGrpcTransport
class ProductSearchGrpcAsyncIOTransport(ProductSearchTransport):
"""gRPC AsyncIO backend transport for ProductSearch.
Manages Products and ProductSets of reference images for use in
product search. It uses the following resource model:
- The API has a collection of
[ProductSet][google.cloud.vision.v1p3beta1.ProductSet] resources,
named ``projects/*/locations/*/productSets/*``, which acts as a
way to put different products into groups to limit
identification.
In parallel,
- The API has a collection of
[Product][google.cloud.vision.v1p3beta1.Product] resources, named
``projects/*/locations/*/products/*``
- Each [Product][google.cloud.vision.v1p3beta1.Product] has a
collection of
[ReferenceImage][google.cloud.vision.v1p3beta1.ReferenceImage]
resources, named
``projects/*/locations/*/products/*/referenceImages/*``
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
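A minimal construction sketch (illustrative only; applications normally
obtain this transport indirectly through the ProductSearch async client,
and ``request`` below stands for a caller-built
``CreateProductSetRequest``):

    transport = ProductSearchGrpcAsyncIOTransport()
    response = await transport.create_product_set(request)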
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'vision.googleapis.com',
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
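Example:
    A hedged sketch; it assumes application default credentials are
    available in the environment and that ``'my-project'`` is a
    placeholder project ID:

        channel = ProductSearchGrpcAsyncIOTransport.create_channel(
            'vision.googleapis.com',
            quota_project_id='my-project',
        )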
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
def __init__(self, *,
host: str = 'vision.googleapis.com',
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: Optional[aio.Channel] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
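Example:
    A sketch of passing a pre-built channel (``my_channel`` is a
    hypothetical ``aio.Channel`` created elsewhere; credentials are
    then ignored, as noted above):

        transport = ProductSearchGrpcAsyncIOTransport(channel=my_channel)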
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def create_product_set(self) -> Callable[
[product_search_service.CreateProductSetRequest],
Awaitable[product_search_service.ProductSet]]:
r"""Return a callable for the create product set method over gRPC.
Creates and returns a new ProductSet resource.
Possible errors:
- Returns INVALID_ARGUMENT if display_name is missing, or is
longer than 4096 characters.
Returns:
Callable[[~.CreateProductSetRequest],
Awaitable[~.ProductSet]]:
A function that, when called, will call the underlying RPC
on the server.
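For illustration (a sketch; ``request`` is assumed to be a
``product_search_service.CreateProductSetRequest`` built by the caller):

    product_set = await transport.create_product_set(request)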
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_product_set' not in self._stubs:
self._stubs['create_product_set'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/CreateProductSet',
request_serializer=product_search_service.CreateProductSetRequest.serialize,
response_deserializer=product_search_service.ProductSet.deserialize,
)
return self._stubs['create_product_set']
@property
def list_product_sets(self) -> Callable[
[product_search_service.ListProductSetsRequest],
Awaitable[product_search_service.ListProductSetsResponse]]:
r"""Return a callable for the list product sets method over gRPC.
Lists ProductSets in an unspecified order.
Possible errors:
- Returns INVALID_ARGUMENT if page_size is greater than 100, or
less than 1.
Returns:
Callable[[~.ListProductSetsRequest],
Awaitable[~.ListProductSetsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_product_sets' not in self._stubs:
self._stubs['list_product_sets'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/ListProductSets',
request_serializer=product_search_service.ListProductSetsRequest.serialize,
response_deserializer=product_search_service.ListProductSetsResponse.deserialize,
)
return self._stubs['list_product_sets']
@property
def get_product_set(self) -> Callable[
[product_search_service.GetProductSetRequest],
Awaitable[product_search_service.ProductSet]]:
r"""Return a callable for the get product set method over gRPC.
Gets information associated with a ProductSet.
Possible errors:
- Returns NOT_FOUND if the ProductSet does not exist.
Returns:
Callable[[~.GetProductSetRequest],
Awaitable[~.ProductSet]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_product_set' not in self._stubs:
self._stubs['get_product_set'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/GetProductSet',
request_serializer=product_search_service.GetProductSetRequest.serialize,
response_deserializer=product_search_service.ProductSet.deserialize,
)
return self._stubs['get_product_set']
@property
def update_product_set(self) -> Callable[
[product_search_service.UpdateProductSetRequest],
Awaitable[product_search_service.ProductSet]]:
r"""Return a callable for the update product set method over gRPC.
Makes changes to a ProductSet resource. Only display_name can be
updated currently.
Possible errors:
- Returns NOT_FOUND if the ProductSet does not exist.
- Returns INVALID_ARGUMENT if display_name is present in
update_mask but missing from the request or longer than 4096
characters.
Returns:
Callable[[~.UpdateProductSetRequest],
Awaitable[~.ProductSet]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_product_set' not in self._stubs:
self._stubs['update_product_set'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/UpdateProductSet',
request_serializer=product_search_service.UpdateProductSetRequest.serialize,
response_deserializer=product_search_service.ProductSet.deserialize,
)
return self._stubs['update_product_set']
@property
def delete_product_set(self) -> Callable[
[product_search_service.DeleteProductSetRequest],
Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete product set method over gRPC.
Permanently deletes a ProductSet. All Products and
ReferenceImages in the ProductSet will be deleted.
The actual image files are not deleted from Google Cloud
Storage.
Possible errors:
- Returns NOT_FOUND if the ProductSet does not exist.
Returns:
Callable[[~.DeleteProductSetRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_product_set' not in self._stubs:
self._stubs['delete_product_set'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/DeleteProductSet',
request_serializer=product_search_service.DeleteProductSetRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_product_set']
@property
def create_product(self) -> Callable[
[product_search_service.CreateProductRequest],
Awaitable[product_search_service.Product]]:
r"""Return a callable for the create product method over gRPC.
Creates and returns a new product resource.
Possible errors:
- Returns INVALID_ARGUMENT if display_name is missing or longer
than 4096 characters.
- Returns INVALID_ARGUMENT if description is longer than 4096
characters.
- Returns INVALID_ARGUMENT if product_category is missing or
invalid.
Returns:
Callable[[~.CreateProductRequest],
Awaitable[~.Product]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_product' not in self._stubs:
self._stubs['create_product'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/CreateProduct',
request_serializer=product_search_service.CreateProductRequest.serialize,
response_deserializer=product_search_service.Product.deserialize,
)
return self._stubs['create_product']
@property
def list_products(self) -> Callable[
[product_search_service.ListProductsRequest],
Awaitable[product_search_service.ListProductsResponse]]:
r"""Return a callable for the list products method over gRPC.
Lists products in an unspecified order.
Possible errors:
- Returns INVALID_ARGUMENT if page_size is greater than 100 or
less than 1.
Returns:
Callable[[~.ListProductsRequest],
Awaitable[~.ListProductsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_products' not in self._stubs:
self._stubs['list_products'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/ListProducts',
request_serializer=product_search_service.ListProductsRequest.serialize,
response_deserializer=product_search_service.ListProductsResponse.deserialize,
)
return self._stubs['list_products']
@property
def get_product(self) -> Callable[
[product_search_service.GetProductRequest],
Awaitable[product_search_service.Product]]:
r"""Return a callable for the get product method over gRPC.
Gets information associated with a Product.
Possible errors:
- Returns NOT_FOUND if the Product does not exist.
Returns:
Callable[[~.GetProductRequest],
Awaitable[~.Product]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_product' not in self._stubs:
self._stubs['get_product'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/GetProduct',
request_serializer=product_search_service.GetProductRequest.serialize,
response_deserializer=product_search_service.Product.deserialize,
)
return self._stubs['get_product']
@property
def update_product(self) -> Callable[
[product_search_service.UpdateProductRequest],
Awaitable[product_search_service.Product]]:
r"""Return a callable for the update product method over gRPC.
Makes changes to a Product resource. Only display_name,
description and labels can be updated right now.
If labels are updated, the change will not be reflected in
queries until the next index time.
Possible errors:
- Returns NOT_FOUND if the Product does not exist.
- Returns INVALID_ARGUMENT if display_name is present in
update_mask but is missing from the request or longer than
4096 characters.
- Returns INVALID_ARGUMENT if description is present in
update_mask but is longer than 4096 characters.
- Returns INVALID_ARGUMENT if product_category is present in
update_mask.
Returns:
Callable[[~.UpdateProductRequest],
Awaitable[~.Product]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_product' not in self._stubs:
self._stubs['update_product'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/UpdateProduct',
request_serializer=product_search_service.UpdateProductRequest.serialize,
response_deserializer=product_search_service.Product.deserialize,
)
return self._stubs['update_product']
@property
def delete_product(self) -> Callable[
[product_search_service.DeleteProductRequest],
Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete product method over gRPC.
Permanently deletes a product and its reference images.
Metadata of the product and all its images will be deleted right
away, but search queries against ProductSets containing the
product may still work until all related caches are refreshed.
Possible errors:
- Returns NOT_FOUND if the product does not exist.
Returns:
Callable[[~.DeleteProductRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_product' not in self._stubs:
self._stubs['delete_product'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/DeleteProduct',
request_serializer=product_search_service.DeleteProductRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_product']
@property
def create_reference_image(self) -> Callable[
[product_search_service.CreateReferenceImageRequest],
Awaitable[product_search_service.ReferenceImage]]:
r"""Return a callable for the create reference image method over gRPC.
Creates and returns a new ReferenceImage resource.
The ``bounding_poly`` field is optional. If ``bounding_poly`` is
not specified, the system will try to detect regions of interest
in the image that are compatible with the product_category on
the parent product. If it is specified, detection is ALWAYS
skipped. The system converts polygons into non-rotated
rectangles.
Note that the pipeline will resize the image if the image
resolution is too large to process (above 50MP).
Possible errors:
- Returns INVALID_ARGUMENT if the image_uri is missing or
longer than 4096 characters.
- Returns INVALID_ARGUMENT if the product does not exist.
- Returns INVALID_ARGUMENT if bounding_poly is not provided,
and nothing compatible with the parent product's
product_category is detected.
- Returns INVALID_ARGUMENT if bounding_poly contains more than
10 polygons.
Returns:
Callable[[~.CreateReferenceImageRequest],
Awaitable[~.ReferenceImage]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_reference_image' not in self._stubs:
self._stubs['create_reference_image'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/CreateReferenceImage',
request_serializer=product_search_service.CreateReferenceImageRequest.serialize,
response_deserializer=product_search_service.ReferenceImage.deserialize,
)
return self._stubs['create_reference_image']
@property
def delete_reference_image(self) -> Callable[
[product_search_service.DeleteReferenceImageRequest],
Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete reference image method over gRPC.
Permanently deletes a reference image.
The image metadata will be deleted right away, but search
queries against ProductSets containing the image may still work
until all related caches are refreshed.
The actual image files are not deleted from Google Cloud
Storage.
Possible errors:
- Returns NOT_FOUND if the reference image does not exist.
Returns:
Callable[[~.DeleteReferenceImageRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_reference_image' not in self._stubs:
self._stubs['delete_reference_image'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/DeleteReferenceImage',
request_serializer=product_search_service.DeleteReferenceImageRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_reference_image']
@property
def list_reference_images(self) -> Callable[
[product_search_service.ListReferenceImagesRequest],
Awaitable[product_search_service.ListReferenceImagesResponse]]:
r"""Return a callable for the list reference images method over gRPC.
Lists reference images.
Possible errors:
- Returns NOT_FOUND if the parent product does not exist.
- Returns INVALID_ARGUMENT if the page_size is greater than
100, or less than 1.
Returns:
Callable[[~.ListReferenceImagesRequest],
Awaitable[~.ListReferenceImagesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_reference_images' not in self._stubs:
self._stubs['list_reference_images'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/ListReferenceImages',
request_serializer=product_search_service.ListReferenceImagesRequest.serialize,
response_deserializer=product_search_service.ListReferenceImagesResponse.deserialize,
)
return self._stubs['list_reference_images']
@property
def get_reference_image(self) -> Callable[
[product_search_service.GetReferenceImageRequest],
Awaitable[product_search_service.ReferenceImage]]:
r"""Return a callable for the get reference image method over gRPC.
Gets information associated with a ReferenceImage.
Possible errors:
- Returns NOT_FOUND if the specified image does not exist.
Returns:
Callable[[~.GetReferenceImageRequest],
Awaitable[~.ReferenceImage]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_reference_image' not in self._stubs:
self._stubs['get_reference_image'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/GetReferenceImage',
request_serializer=product_search_service.GetReferenceImageRequest.serialize,
response_deserializer=product_search_service.ReferenceImage.deserialize,
)
return self._stubs['get_reference_image']
@property
def add_product_to_product_set(self) -> Callable[
[product_search_service.AddProductToProductSetRequest],
Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the add product to product set method over gRPC.
Adds a Product to the specified ProductSet. If the Product is
already present, no change is made.
One Product can be added to at most 100 ProductSets.
Possible errors:
- Returns NOT_FOUND if the Product or the ProductSet doesn't
exist.
Returns:
Callable[[~.AddProductToProductSetRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'add_product_to_product_set' not in self._stubs:
self._stubs['add_product_to_product_set'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/AddProductToProductSet',
request_serializer=product_search_service.AddProductToProductSetRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['add_product_to_product_set']
@property
def remove_product_from_product_set(self) -> Callable[
[product_search_service.RemoveProductFromProductSetRequest],
Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the remove product from product
set method over gRPC.
Removes a Product from the specified ProductSet.
Possible errors:
- Returns NOT_FOUND If the Product is not found under the
ProductSet.
Returns:
Callable[[~.RemoveProductFromProductSetRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'remove_product_from_product_set' not in self._stubs:
self._stubs['remove_product_from_product_set'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/RemoveProductFromProductSet',
request_serializer=product_search_service.RemoveProductFromProductSetRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['remove_product_from_product_set']
@property
def list_products_in_product_set(self) -> Callable[
[product_search_service.ListProductsInProductSetRequest],
Awaitable[product_search_service.ListProductsInProductSetResponse]]:
r"""Return a callable for the list products in product set method over gRPC.
Lists the Products in a ProductSet, in an unspecified order. If
the ProductSet does not exist, the products field of the
response will be empty.
Possible errors:
- Returns INVALID_ARGUMENT if page_size is greater than 100 or
less than 1.
Returns:
Callable[[~.ListProductsInProductSetRequest],
Awaitable[~.ListProductsInProductSetResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_products_in_product_set' not in self._stubs:
self._stubs['list_products_in_product_set'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/ListProductsInProductSet',
request_serializer=product_search_service.ListProductsInProductSetRequest.serialize,
response_deserializer=product_search_service.ListProductsInProductSetResponse.deserialize,
)
return self._stubs['list_products_in_product_set']
@property
def import_product_sets(self) -> Callable[
[product_search_service.ImportProductSetsRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the import product sets method over gRPC.
Asynchronous API that imports a list of reference images to
specified product sets based on a list of image information.
The [google.longrunning.Operation][google.longrunning.Operation]
API can be used to keep track of the progress and results of the
request. ``Operation.metadata`` contains
``BatchOperationMetadata`` (progress) and ``Operation.response``
contains ``ImportProductSetsResponse`` (results).
The input source of this method is a CSV file on Google Cloud
Storage. For the format of the CSV file, please see
[ImportProductSetsGcsSource.csv_file_uri][google.cloud.vision.v1p3beta1.ImportProductSetsGcsSource.csv_file_uri].
Returns:
Callable[[~.ImportProductSetsRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
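A hedged sketch of driving the long-running operation through this
transport (``request`` is assumed to be a caller-built
``ImportProductSetsRequest``):

    operation = await transport.import_product_sets(request)
    # Poll the operation by name through the operations client.
    latest = await transport.operations_client.get_operation(operation.name)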
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'import_product_sets' not in self._stubs:
self._stubs['import_product_sets'] = self.grpc_channel.unary_unary(
'/google.cloud.vision.v1p3beta1.ProductSearch/ImportProductSets',
request_serializer=product_search_service.ImportProductSetsRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['import_product_sets']
__all__ = (
'ProductSearchGrpcAsyncIOTransport',
)
# repo: SimonSapin/cairocffi | ref: refs/heads/master
# path: cairocffi/test_cairo.py
"""
cairocffi.tests
~~~~~~~~~~~~~~~
Test suite for cairocffi.
:copyright: Copyright 2013-2019 by Simon Sapin
:license: BSD, see LICENSE for details.
"""
import array
import base64
import contextlib
import gc
import io
import math
import os
import shutil
import sys
import tempfile
import cairocffi
import pytest
from . import (
PDF_METADATA_AUTHOR, PDF_METADATA_CREATE_DATE, PDF_METADATA_CREATOR,
PDF_METADATA_KEYWORDS, PDF_METADATA_MOD_DATE, PDF_METADATA_SUBJECT,
PDF_METADATA_TITLE, PDF_OUTLINE_FLAG_BOLD, PDF_OUTLINE_FLAG_OPEN,
PDF_OUTLINE_ROOT, SVG_UNIT_PC, SVG_UNIT_PT, SVG_UNIT_PX, TAG_LINK, Context,
FontFace, FontOptions, ImageSurface, LinearGradient, Matrix, Pattern,
PDFSurface, PSSurface, RadialGradient, RecordingSurface, ScaledFont,
SolidPattern, Surface, SurfacePattern, SVGSurface, ToyFontFace,
cairo_version, cairo_version_string)
if sys.byteorder == 'little':
def pixel(argb): # pragma: no cover
"""Convert a 4-byte ARGB string to native-endian."""
return argb[::-1]
else:
def pixel(argb): # pragma: no cover
"""Convert a 4-byte ARGB string to native-endian."""
return argb
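# Illustrative note (little-endian case, which the expected byte strings in
# these tests assume): pixel(b'\x80\x00\x00\x00') returns
# b'\x00\x00\x00\x80', the in-memory layout of a premultiplied ARGB32
# pixel with alpha 0x80 and black color channels.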
@contextlib.contextmanager
def temp_directory():
tempdir = tempfile.mkdtemp('é')
assert 'é' in tempdir # Test non-ASCII filenames
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def round_tuple(values):
return tuple(round(v, 6) for v in values)
def assert_raise_finished(func, *args, **kwargs):
with pytest.raises(cairocffi.CairoError) as exc:
func(*args, **kwargs)
assert 'SURFACE_FINISHED' in str(exc) or 'ExceptionInfo' in str(exc)
def test_cairo_version():
major, minor, micro = map(int, cairo_version_string().split('.'))
assert cairo_version() == major * 10000 + minor * 100 + micro
def test_install_as_pycairo():
cairocffi.install_as_pycairo()
import cairo
assert cairo is cairocffi
def test_image_surface():
assert ImageSurface.format_stride_for_width(
cairocffi.FORMAT_ARGB32, 100) == 400
assert ImageSurface.format_stride_for_width(
cairocffi.FORMAT_A8, 100) == 100
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 20, 30)
assert surface.get_format() == cairocffi.FORMAT_ARGB32
assert surface.get_width() == 20
assert surface.get_height() == 30
assert surface.get_stride() == 20 * 4
with pytest.raises(ValueError):
# buffer too small
data = array.array('B', b'\x00' * 799)
ImageSurface.create_for_data(data, cairocffi.FORMAT_ARGB32, 10, 20)
data = array.array('B', b'\x00' * 800)
surface = ImageSurface.create_for_data(data, cairocffi.FORMAT_ARGB32,
10, 20, stride=40)
context = Context(surface)
# The default source is opaque black:
assert context.get_source().get_rgba() == (0, 0, 0, 1)
context.paint_with_alpha(0.5)
assert data.tobytes() == pixel(b'\x80\x00\x00\x00') * 200
def test_image_bytearray_buffer():
if '__pypy__' in sys.modules:
pytest.xfail()
# Also test buffers through ctypes.c_char.from_buffer,
# not available on PyPy
data = bytearray(800)
surface = ImageSurface.create_for_data(data, cairocffi.FORMAT_ARGB32,
10, 20, stride=40)
Context(surface).paint_with_alpha(0.5)
assert data == pixel(b'\x80\x00\x00\x00') * 200
@pytest.mark.xfail(cairo_version() < 11200,
reason='Cairo version too low')
def test_surface_create_similar_image():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 20, 30)
similar = surface.create_similar_image(cairocffi.FORMAT_A8, 4, 100)
assert isinstance(similar, ImageSurface)
assert similar.get_content() == cairocffi.CONTENT_ALPHA
assert similar.get_format() == cairocffi.FORMAT_A8
assert similar.get_width() == 4
assert similar.get_height() == 100
@pytest.mark.xfail(cairo_version() < 11000,
reason='Cairo version too low')
def test_surface_create_for_rectangle():
surface = ImageSurface(cairocffi.FORMAT_A8, 4, 4)
data = surface.get_data()
assert data[:] == b'\x00' * 16
Context(surface.create_for_rectangle(1, 1, 2, 2)).paint()
assert data[:] == (
b'\x00\x00\x00\x00'
b'\x00\xFF\xFF\x00'
b'\x00\xFF\xFF\x00'
b'\x00\x00\x00\x00')
def test_surface():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 20, 30)
similar = surface.create_similar(cairocffi.CONTENT_ALPHA, 4, 100)
assert isinstance(similar, ImageSurface)
assert similar.get_content() == cairocffi.CONTENT_ALPHA
assert similar.get_format() == cairocffi.FORMAT_A8
assert similar.get_width() == 4
assert similar.get_height() == 100
assert similar.has_show_text_glyphs() is False
assert PDFSurface(None, 1, 1).has_show_text_glyphs() is True
surface.copy_page()
surface.show_page()
surface.mark_dirty()
surface.mark_dirty_rectangle(1, 2, 300, 12000)
surface.flush()
surface.set_device_offset(14, 3)
assert surface.get_device_offset() == (14, 3)
surface.set_fallback_resolution(15, 6)
assert surface.get_fallback_resolution() == (15, 6)
context = Context(surface)
assert isinstance(context.get_target(), ImageSurface)
surface_map = cairocffi.surfaces.SURFACE_TYPE_TO_CLASS
try:
del surface_map[cairocffi.SURFACE_TYPE_IMAGE]
target = context.get_target()
assert target._pointer == surface._pointer
assert isinstance(target, Surface)
assert not isinstance(target, ImageSurface)
finally:
surface_map[cairocffi.SURFACE_TYPE_IMAGE] = ImageSurface
surface.finish()
assert_raise_finished(surface.copy_page)
assert_raise_finished(surface.show_page)
assert_raise_finished(surface.set_device_offset, 1, 2)
assert_raise_finished(surface.set_fallback_resolution, 3, 4)
def test_target_lifetime():
# Test our workaround for the related CFFI bug:
# https://bitbucket.org/cffi/cffi/issue/92/
if not hasattr(sys, 'getrefcount'):
pytest.xfail() # PyPy
gc.collect() # Clean up stuff from other tests
target = io.BytesIO()
initial_refcount = sys.getrefcount(target)
assert len(cairocffi.surfaces.KeepAlive.instances) == 0
surface = PDFSurface(target, 100, 100)
# The target is in a KeepAlive object
assert len(cairocffi.surfaces.KeepAlive.instances) == 1
assert sys.getrefcount(target) == initial_refcount + 1
del surface
gc.collect() # Make sure surface is collected
assert len(cairocffi.surfaces.KeepAlive.instances) == 0
assert sys.getrefcount(target) == initial_refcount
@pytest.mark.xfail(cairo_version() < 11000,
reason='Cairo version too low')
def test_mime_data():
surface = PDFSurface(None, 1, 1)
assert surface.get_mime_data('image/jpeg') is None
gc.collect() # Clean up KeepAlive stuff from other tests
assert len(cairocffi.surfaces.KeepAlive.instances) == 0
surface.set_mime_data('image/jpeg', b'lol')
assert len(cairocffi.surfaces.KeepAlive.instances) == 1
assert surface.get_mime_data('image/jpeg')[:] == b'lol'
surface.set_mime_data('image/jpeg', None)
assert len(cairocffi.surfaces.KeepAlive.instances) == 0
if cairo_version() >= 11200:
# This actually segfaults on cairo 1.10.x
assert surface.get_mime_data('image/jpeg') is None
surface.finish()
assert_raise_finished(surface.set_mime_data, 'image/jpeg', None)
@pytest.mark.xfail(cairo_version() < 11200,
reason='Cairo version too low')
def test_supports_mime_type():
# Also test we get actual booleans:
assert PDFSurface(None, 1, 1).supports_mime_type('image/jpeg') is True
surface = ImageSurface(cairocffi.FORMAT_A8, 1, 1)
assert surface.supports_mime_type('image/jpeg') is False
@pytest.mark.xfail(cairo_version() < 11400,
reason='Cairo version too low')
def test_device_scale():
surface = PDFSurface(None, 1, 1)
assert surface.get_device_scale() == (1, 1)
surface.set_device_scale(2, 3)
assert surface.get_device_scale() == (2, 3)
@pytest.mark.xfail(cairo_version() < 11504,
reason='Cairo version too low')
def test_metadata():
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 1, 1)
surface.set_metadata(PDF_METADATA_TITLE, 'title')
surface.set_metadata(PDF_METADATA_SUBJECT, 'subject')
surface.set_metadata(PDF_METADATA_CREATOR, 'creator')
surface.set_metadata(PDF_METADATA_AUTHOR, 'author')
surface.set_metadata(PDF_METADATA_KEYWORDS, 'keywords')
surface.set_metadata(PDF_METADATA_CREATE_DATE, '2013-07-21T23:46:00+01:00')
surface.set_metadata(PDF_METADATA_MOD_DATE, '2013-07-21T23:46:00Z')
surface.finish()
pdf_bytes = file_obj.getvalue()
assert b'/Title (title)' in pdf_bytes
assert b'/Subject (subject)' in pdf_bytes
assert b'/Creator (creator)' in pdf_bytes
assert b'/Author (author)' in pdf_bytes
assert b'/Keywords (keywords)' in pdf_bytes
assert b"/CreationDate (20130721234600+01'00)" in pdf_bytes
assert b'/ModDate (20130721234600Z)' in pdf_bytes
@pytest.mark.xfail(cairo_version() < 11504,
reason='Cairo version too low')
def test_outline():
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 1, 1)
outline = surface.add_outline(
PDF_OUTLINE_ROOT, 'title 1', 'page=1 pos=[1 1]',
PDF_OUTLINE_FLAG_OPEN | PDF_OUTLINE_FLAG_BOLD)  # flags combine with bitwise OR
surface.add_outline(outline, 'title 2', 'page=1 pos=[1 1]')
surface.finish()
pdf_bytes = file_obj.getvalue()
assert b'/Title (title 1)' in pdf_bytes
assert b'/Title (title 2)' in pdf_bytes
@pytest.mark.xfail(cairo_version() < 11504,
reason='Cairo version too low')
def test_page_label():
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 1, 1)
surface.set_page_label('abc')
surface.finish()
pdf_bytes = file_obj.getvalue()
assert b'/P (abc)' in pdf_bytes
@pytest.mark.xfail(cairo_version() < 11504,
reason='Cairo version too low')
def test_tag():
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 10, 10)
context = Context(surface)
context.tag_begin('Document')
context.tag_begin(
TAG_LINK,
attributes='rect=[1 2 4 5] uri=\'https://cairocffi.readthedocs.io/\'')
context.set_source_rgba(1, 0, .5, 1)
context.rectangle(2, 3, 4, 5)
context.fill()
context.tag_end(TAG_LINK)
context.tag_end('Document')
context.show_page()
surface.finish()
pdf_bytes = file_obj.getvalue()
assert b'/URI (https://cairocffi.readthedocs.io/)' in pdf_bytes
assert b'/S /Document' in pdf_bytes
@pytest.mark.xfail(cairo_version() < 11504,
reason='Cairo version too low')
def test_thumbnail_size():
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 1, 1)
surface.set_thumbnail_size(1, 1)
surface.finish()
pdf_bytes1 = file_obj.getvalue()
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 1, 1)
surface.set_thumbnail_size(9, 9)
surface.finish()
pdf_bytes2 = file_obj.getvalue()
assert len(pdf_bytes1) < len(pdf_bytes2)
@pytest.mark.xfail(cairo_version() < 11510,
reason='Cairo version too low')
def test_document_unit():
surface = SVGSurface(None, 1, 2)
assert surface.get_document_unit() == SVG_UNIT_PT
file_obj = io.BytesIO()
surface = SVGSurface(file_obj, 1, 2)
surface.set_document_unit(SVG_UNIT_PX)
assert surface.get_document_unit() == SVG_UNIT_PX
surface.finish()
svg_bytes = file_obj.getvalue()
assert b'width="1px"' in svg_bytes
assert b'height="2px"' in svg_bytes
file_obj = io.BytesIO()
surface = SVGSurface(file_obj, 1, 2)
surface.set_document_unit(SVG_UNIT_PC)
assert surface.get_document_unit() == SVG_UNIT_PC
surface.finish()
svg_bytes = file_obj.getvalue()
assert b'width="1pc"' in svg_bytes
assert b'height="2pc"' in svg_bytes
def test_png():
png_bytes = base64.b64decode(
b'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVQI12O'
b'w69x7BgAE3gJRgNit0AAAAABJRU5ErkJggg==')
png_magic_number = png_bytes[:8]
with temp_directory() as tempdir:
filename = os.path.join(tempdir, 'foo.png')
filename_bytes = filename.encode(sys.getfilesystemencoding())
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 1, 1)
surface.write_to_png(filename)
with open(filename, 'rb') as fd:
written_png_bytes = fd.read()
assert written_png_bytes.startswith(png_magic_number)
open(filename, 'wb').close()
with open(filename, 'rb') as fd:
assert fd.read() == b''
surface.write_to_png(filename_bytes)
with open(filename, 'rb') as fd:
assert fd.read() == written_png_bytes
file_obj = io.BytesIO()
surface.write_to_png(file_obj)
assert file_obj.getvalue() == written_png_bytes
assert surface.write_to_png() == written_png_bytes
with open(filename, 'wb') as fd:
fd.write(png_bytes)
for source in [io.BytesIO(png_bytes), filename, filename_bytes]:
surface = ImageSurface.create_from_png(source)
assert surface.get_format() == cairocffi.FORMAT_ARGB32
assert surface.get_width() == 1
assert surface.get_height() == 1
assert surface.get_stride() == 4
assert surface.get_data()[:] == pixel(b'\xcc\x32\x6e\x97')
with pytest.raises(IOError):
# Truncated input
surface = ImageSurface.create_from_png(io.BytesIO(png_bytes[:30]))
with pytest.raises(IOError):
surface = ImageSurface.create_from_png(io.BytesIO(b''))
@pytest.mark.xfail(cairo_version() < 11000,
reason='Cairo version too low')
def test_pdf_versions():
assert set(PDFSurface.get_versions()) >= set([
cairocffi.PDF_VERSION_1_4, cairocffi.PDF_VERSION_1_5])
assert PDFSurface.version_to_string(cairocffi.PDF_VERSION_1_4) == 'PDF 1.4'
with pytest.raises(TypeError):
PDFSurface.version_to_string('PDF_VERSION_42')
with pytest.raises(ValueError):
PDFSurface.version_to_string(42)
file_obj = io.BytesIO()
PDFSurface(file_obj, 1, 1).finish()
assert file_obj.getvalue().startswith(b'%PDF-1.5')
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 1, 1)
surface.restrict_to_version(cairocffi.PDF_VERSION_1_4)
surface.finish()
assert file_obj.getvalue().startswith(b'%PDF-1.4')
def test_pdf_surface():
with temp_directory() as tempdir:
filename = os.path.join(tempdir, 'foo.pdf')
filename_bytes = filename.encode(sys.getfilesystemencoding())
file_obj = io.BytesIO()
for target in [filename, filename_bytes, file_obj, None]:
surface = PDFSurface(target, 123, 432)
surface.finish()
with open(filename, 'rb') as fd:
assert fd.read().startswith(b'%PDF')
with open(filename_bytes, 'rb') as fd:
assert fd.read().startswith(b'%PDF')
pdf_bytes = file_obj.getvalue()
assert pdf_bytes.startswith(b'%PDF')
assert b'/MediaBox [ 0 0 123 432 ]' in pdf_bytes
assert pdf_bytes.count(b'/Type /Pages') == 1
assert pdf_bytes.count(b'/Type /Page') == 2
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 1, 1)
context = Context(surface)
surface.set_size(12, 100)
context.show_page()
surface.set_size(42, 700)
context.copy_page()
surface.finish()
pdf_bytes = file_obj.getvalue()
assert b'/MediaBox [ 0 0 1 1 ]' not in pdf_bytes
assert b'/MediaBox [ 0 0 12 100 ]' in pdf_bytes
assert b'/MediaBox [ 0 0 42 700 ]' in pdf_bytes
assert pdf_bytes.count(b'/Type /Pages') == 1
assert pdf_bytes.count(b'/Type /Page') == 3
def test_svg_surface():
assert set(SVGSurface.get_versions()) >= set([
cairocffi.SVG_VERSION_1_1, cairocffi.SVG_VERSION_1_2])
assert SVGSurface.version_to_string(cairocffi.SVG_VERSION_1_1) == 'SVG 1.1'
with pytest.raises(TypeError):
SVGSurface.version_to_string('SVG_VERSION_42')
with pytest.raises(ValueError):
SVGSurface.version_to_string(42)
with temp_directory() as tempdir:
filename = os.path.join(tempdir, 'foo.svg')
filename_bytes = filename.encode(sys.getfilesystemencoding())
file_obj = io.BytesIO()
for target in [filename, filename_bytes, file_obj, None]:
SVGSurface(target, 123, 432).finish()
with open(filename, 'rb') as fd:
assert fd.read().startswith(b'<?xml')
with open(filename_bytes, 'rb') as fd:
assert fd.read().startswith(b'<?xml')
svg_bytes = file_obj.getvalue()
assert svg_bytes.startswith(b'<?xml')
assert b'viewBox="0 0 123 432"' in svg_bytes
surface = SVGSurface(None, 1, 1)
# Not obvious to test
surface.restrict_to_version(cairocffi.SVG_VERSION_1_1)
def test_ps_surface():
assert set(PSSurface.get_levels()) >= set([
cairocffi.PS_LEVEL_2, cairocffi.PS_LEVEL_3])
assert PSSurface.ps_level_to_string(cairocffi.PS_LEVEL_3) == 'PS Level 3'
with pytest.raises(TypeError):
PSSurface.ps_level_to_string('PS_LEVEL_42')
with pytest.raises(ValueError):
PSSurface.ps_level_to_string(42)
with temp_directory() as tempdir:
filename = os.path.join(tempdir, 'foo.ps')
filename_bytes = filename.encode(sys.getfilesystemencoding())
file_obj = io.BytesIO()
for target in [filename, filename_bytes, file_obj, None]:
PSSurface(target, 123, 432).finish()
with open(filename, 'rb') as fd:
assert fd.read().startswith(b'%!PS')
with open(filename_bytes, 'rb') as fd:
assert fd.read().startswith(b'%!PS')
assert file_obj.getvalue().startswith(b'%!PS')
file_obj = io.BytesIO()
surface = PSSurface(file_obj, 1, 1)
surface.restrict_to_level(cairocffi.PS_LEVEL_2) # Not obvious to test
assert surface.get_eps() is False
surface.set_eps('lol')
assert surface.get_eps() is True
surface.set_eps('')
assert surface.get_eps() is False
surface.set_size(10, 12)
surface.dsc_comment('%%Lorem')
surface.dsc_begin_setup()
surface.dsc_comment('%%ipsum')
surface.dsc_begin_page_setup()
surface.dsc_comment('%%dolor')
surface.finish()
ps_bytes = file_obj.getvalue()
assert b'%%Lorem' in ps_bytes
assert b'%%ipsum' in ps_bytes
assert b'%%dolor' in ps_bytes
def _recording_surface_common(extents):
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 100, 100)
empty_pixels = surface.get_data()[:]
assert empty_pixels == b'\x00' * 40000
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 100, 100)
context = Context(surface)
context.move_to(20, 50)
context.show_text('Something about us.')
text_pixels = surface.get_data()[:]
assert text_pixels != empty_pixels
recording_surface = RecordingSurface(cairocffi.CONTENT_COLOR_ALPHA,
extents)
context = Context(recording_surface)
context.move_to(20, 50)
assert recording_surface.ink_extents() == (0, 0, 0, 0)
context.show_text('Something about us.')
recording_surface.flush()
assert recording_surface.ink_extents() != (0, 0, 0, 0)
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 100, 100)
context = Context(surface)
context.set_source_surface(recording_surface)
context.paint()
recorded_pixels = surface.get_data()[:]
return text_pixels, recorded_pixels
@pytest.mark.xfail(cairo_version() < 11000,
reason='Cairo version too low')
def test_recording_surface():
text_pixels, recorded_pixels = _recording_surface_common((0, 0, 140, 80))
assert recorded_pixels == text_pixels
@pytest.mark.xfail(cairo_version() < 11200,
reason='Cairo version too low')
def test_unbounded_recording_surface():
text_pixels, recorded_pixels = _recording_surface_common(None)
assert recorded_pixels == text_pixels
@pytest.mark.xfail(cairo_version() < 11200,
reason='Cairo version too low')
def test_recording_surface_get_extents():
for extents in [None, (0, 0, 140, 80)]:
surface = RecordingSurface(cairocffi.CONTENT_COLOR_ALPHA, extents)
assert surface.get_extents() == extents
def test_matrix():
m = Matrix()
with pytest.raises(AttributeError):
m.some_inexistent_attribute
assert m.as_tuple() == (1, 0, 0, 1, 0, 0)
m.translate(12, 4)
assert m.as_tuple() == (1, 0, 0, 1, 12, 4)
m.scale(2, 7)
assert m.as_tuple() == (2, 0, 0, 7, 12, 4)
assert m[3] == 7
assert m.yy == 7
m.yy = 3
assert m.as_tuple() == (2, 0, 0, 3, 12, 4)
assert repr(m) == 'Matrix(2, 0, 0, 3, 12, 4)'
assert str(m) == 'Matrix(2, 0, 0, 3, 12, 4)'
assert m.transform_distance(1, 2) == (2, 6)
assert m.transform_point(1, 2) == (14, 10)
m2 = m.copy()
assert m2 == m
m2.invert()
assert m2.as_tuple() == (0.5, 0, 0, 1./3, -12 / 2, -4. / 3)
assert m.inverted() == m2
assert m.as_tuple() == (2, 0, 0, 3, 12, 4) # Unchanged
m2 = Matrix(*m)
assert m2 == m
m2.invert()
assert m2.as_tuple() == (0.5, 0, 0, 1./3, -12 / 2, -4. / 3)
assert m.inverted() == m2
assert m.as_tuple() == (2, 0, 0, 3, 12, 4) # Still unchanged
m.rotate(math.pi / 2)
assert round_tuple(m.as_tuple()) == (0, 3, -2, 0, 12, 4)
m *= Matrix.init_rotate(math.pi)
assert round_tuple(m.as_tuple()) == (0, -3, 2, 0, -12, -4)
def test_surface_pattern():
surface = ImageSurface(cairocffi.FORMAT_A1, 1, 1)
pattern = SurfacePattern(surface)
surface_again = pattern.get_surface()
assert surface_again is not surface
assert surface_again._pointer == surface._pointer
assert pattern.get_extend() == cairocffi.EXTEND_NONE
pattern.set_extend(cairocffi.EXTEND_REPEAT)
assert pattern.get_extend() == cairocffi.EXTEND_REPEAT
assert pattern.get_filter() == cairocffi.FILTER_GOOD
pattern.set_filter(cairocffi.FILTER_BEST)
assert pattern.get_filter() == cairocffi.FILTER_BEST
assert pattern.get_matrix() == Matrix() # identity
matrix = Matrix.init_rotate(0.5)
pattern.set_matrix(matrix)
assert pattern.get_matrix() == matrix
assert pattern.get_matrix() != Matrix()
def test_solid_pattern():
assert SolidPattern(1, .5, .25).get_rgba() == (1, .5, .25, 1)
assert SolidPattern(1, .5, .25, .75).get_rgba() == (1, .5, .25, .75)
surface = PDFSurface(None, 1, 1)
context = Context(surface)
pattern = SolidPattern(1, .5, .25)
context.set_source(pattern)
assert isinstance(context.get_source(), SolidPattern)
pattern_map = cairocffi.patterns.PATTERN_TYPE_TO_CLASS
try:
del pattern_map[cairocffi.PATTERN_TYPE_SOLID]
re_pattern = context.get_source()
assert re_pattern._pointer == pattern._pointer
assert isinstance(re_pattern, Pattern)
assert not isinstance(re_pattern, SolidPattern)
finally:
pattern_map[cairocffi.PATTERN_TYPE_SOLID] = SolidPattern
def pdf_with_pattern(pattern=None):
file_obj = io.BytesIO()
surface = PDFSurface(file_obj, 100, 100)
context = Context(surface)
if pattern is not None:
context.set_source(pattern)
context.paint()
surface.finish()
return file_obj.getvalue()
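# For instance, pdf_with_pattern() paints with the default opaque-black
# source, while pdf_with_pattern(gradient) embeds the given pattern; the
# gradient tests below grep the resulting PDF bytes for /ShadingType markers.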
def test_linear_gradient():
gradient = LinearGradient(1, 2, 10, 20)
assert gradient.get_linear_points() == (1, 2, 10, 20)
gradient.add_color_stop_rgb(1, 1, .5, .25)
gradient.add_color_stop_rgb(offset=.5, red=1, green=.5, blue=.25)
gradient.add_color_stop_rgba(.5, 1, .5, .75, .25)
assert gradient.get_color_stops() == [
(.5, 1, .5, .25, 1),
(.5, 1, .5, .75, .25),
(1, 1, .5, .25, 1)]
# Values chosen so that we can test get_data() below with an exact
# byte string that (hopefully) does not depend on rounding behavior:
# 255 / 5. == 51.0 == 0x33
surface = ImageSurface(cairocffi.FORMAT_A8, 8, 4)
assert surface.get_data()[:] == b'\x00' * 32
gradient = LinearGradient(1.5, 0, 6.5, 0)
gradient.add_color_stop_rgba(0, 0, 0, 0, 0)
gradient.add_color_stop_rgba(1, 0, 0, 0, 1)
context = Context(surface)
context.set_source(gradient)
context.paint()
assert surface.get_data()[:] == b'\x00\x00\x33\x66\x99\xCC\xFF\xFF' * 4
assert b'/ShadingType 2' not in pdf_with_pattern()
assert b'/ShadingType 2' in pdf_with_pattern(gradient)
def test_radial_gradient():
gradient = RadialGradient(42, 420, 10, 43, 430, 100)
assert gradient.get_radial_circles() == (42, 420, 10, 43, 430, 100)
gradient.add_color_stop_rgb(1, 1, .5, .25)
gradient.add_color_stop_rgb(offset=.5, red=1, green=.5, blue=.25)
gradient.add_color_stop_rgba(.5, 1, .5, .75, .25)
assert gradient.get_color_stops() == [
(.5, 1, .5, .25, 1),
(.5, 1, .5, .75, .25),
(1, 1, .5, .25, 1)]
assert b'/ShadingType 3' not in pdf_with_pattern()
assert b'/ShadingType 3' in pdf_with_pattern(gradient)
def test_context_as_context_manager():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 1, 1)
context = Context(surface)
# The default source is opaque black:
assert context.get_source().get_rgba() == (0, 0, 0, 1)
with context:
context.set_source_rgb(1, .25, .5)
assert context.get_source().get_rgba() == (1, .25, .5, 1)
# Context restored at the end of with statement.
assert context.get_source().get_rgba() == (0, 0, 0, 1)
try:
with context:
context.set_source_rgba(1, .25, .75, .5)
assert context.get_source().get_rgba() == (1, .25, .75, .5)
raise ValueError
except ValueError:
pass
# Context also restored on exceptions.
assert context.get_source().get_rgba() == (0, 0, 0, 1)
def test_context_groups():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 1, 1)
context = Context(surface)
assert isinstance(context.get_target(), ImageSurface)
assert context.get_target()._pointer == surface._pointer
assert context.get_group_target()._pointer == surface._pointer
assert (context.get_group_target().get_content() ==
cairocffi.CONTENT_COLOR_ALPHA)
assert surface.get_data()[:] == pixel(b'\x00\x00\x00\x00')
with context:
context.push_group_with_content(cairocffi.CONTENT_ALPHA)
assert (context.get_group_target().get_content() ==
cairocffi.CONTENT_ALPHA)
context.set_source_rgba(1, .2, .4, .8) # Only A is actually used
assert isinstance(context.get_source(), SolidPattern)
context.paint()
context.pop_group_to_source()
assert isinstance(context.get_source(), SurfacePattern)
# Still nothing on the original surface
assert surface.get_data()[:] == pixel(b'\x00\x00\x00\x00')
context.paint()
assert surface.get_data()[:] == pixel(b'\xCC\x00\x00\x00')
with context:
context.push_group()
context.set_source_rgba(1, .2, .4)
context.paint()
group = context.pop_group()
assert isinstance(context.get_source(), SolidPattern)
assert isinstance(group, SurfacePattern)
context.set_source_surface(group.get_surface())
assert surface.get_data()[:] == pixel(b'\xCC\x00\x00\x00')
context.paint()
assert surface.get_data()[:] == pixel(b'\xFF\xFF\x33\x66')
def test_context_current_transform_matrix():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 1, 1)
context = Context(surface)
assert isinstance(context.get_matrix(), Matrix)
assert context.get_matrix().as_tuple() == (1, 0, 0, 1, 0, 0)
context.translate(6, 5)
assert context.get_matrix().as_tuple() == (1, 0, 0, 1, 6, 5)
context.scale(1, 6)
assert context.get_matrix().as_tuple() == (1, 0, 0, 6, 6, 5)
context.scale(.5)
assert context.get_matrix().as_tuple() == (.5, 0, 0, 3, 6, 5)
context.rotate(math.pi / 2)
assert round_tuple(context.get_matrix().as_tuple()) == (0, 3, -.5, 0, 6, 5)
context.identity_matrix()
assert context.get_matrix().as_tuple() == (1, 0, 0, 1, 0, 0)
context.set_matrix(Matrix(2, 1, 3, 7, 8, 2))
assert context.get_matrix().as_tuple() == (2, 1, 3, 7, 8, 2)
context.transform(Matrix(2, 0, 0, .5, 0, 0))
assert context.get_matrix().as_tuple() == (4, 2, 1.5, 3.5, 8, 2)
context.set_matrix(Matrix(2, 0, 0, 3, 12, 4))
assert context.user_to_device_distance(1, 2) == (2, 6)
assert context.user_to_device(1, 2) == (14, 10)
assert context.device_to_user_distance(2, 6) == (1, 2)
assert round_tuple(context.device_to_user(14, 10)) == (1, 2)
def test_context_path():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 1, 1)
context = Context(surface)
assert context.copy_path() == []
assert context.has_current_point() is False
assert context.get_current_point() == (0, 0)
context.arc(100, 200, 20, math.pi/2, 0)
path_1 = context.copy_path()
assert path_1[0] == (cairocffi.PATH_MOVE_TO, (100, 220))
assert len(path_1) > 1
assert all(part[0] == cairocffi.PATH_CURVE_TO for part in path_1[1:])
assert context.has_current_point() is True
assert context.get_current_point() == (120, 200)
context.new_sub_path()
assert context.copy_path() == path_1
assert context.has_current_point() is False
assert context.get_current_point() == (0, 0)
context.new_path()
assert context.copy_path() == []
assert context.has_current_point() is False
assert context.get_current_point() == (0, 0)
context.arc_negative(100, 200, 20, math.pi/2, 0)
path_2 = context.copy_path()
assert path_2[0] == (cairocffi.PATH_MOVE_TO, (100, 220))
assert len(path_2) > 1
assert all(part[0] == cairocffi.PATH_CURVE_TO for part in path_2[1:])
assert path_2 != path_1
context.new_path()
context.rectangle(10, 20, 100, 200)
path = context.copy_path()
# Some cairo versions add a MOVE_TO after a CLOSE_PATH
if path[-1] == (cairocffi.PATH_MOVE_TO, (10, 20)): # pragma: no cover
path = path[:-1]
assert path == [
(cairocffi.PATH_MOVE_TO, (10, 20)),
(cairocffi.PATH_LINE_TO, (110, 20)),
(cairocffi.PATH_LINE_TO, (110, 220)),
(cairocffi.PATH_LINE_TO, (10, 220)),
(cairocffi.PATH_CLOSE_PATH, ())]
assert context.path_extents() == (10, 20, 110, 220)
context.new_path()
context.move_to(10, 20)
context.line_to(10, 30)
context.rel_move_to(2, 5)
context.rel_line_to(2, 5)
context.curve_to(20, 30, 70, 50, 100, 120)
context.rel_curve_to(20, 30, 70, 50, 100, 120)
context.close_path()
path = context.copy_path()
if path[-1] == (cairocffi.PATH_MOVE_TO, (12, 35)): # pragma: no cover
path = path[:-1]
assert path == [
(cairocffi.PATH_MOVE_TO, (10, 20)),
(cairocffi.PATH_LINE_TO, (10, 30)),
(cairocffi.PATH_MOVE_TO, (12, 35)),
(cairocffi.PATH_LINE_TO, (14, 40)),
(cairocffi.PATH_CURVE_TO, (20, 30, 70, 50, 100, 120)),
(cairocffi.PATH_CURVE_TO, (120, 150, 170, 170, 200, 240)),
(cairocffi.PATH_CLOSE_PATH, ())]
context.new_path()
context.move_to(10, 15)
context.curve_to(20, 30, 70, 50, 100, 120)
assert context.copy_path() == [
(cairocffi.PATH_MOVE_TO, (10, 15)),
(cairocffi.PATH_CURVE_TO, (20, 30, 70, 50, 100, 120))]
path = context.copy_path_flat()
assert len(path) > 2
assert path[0] == (cairocffi.PATH_MOVE_TO, (10, 15))
assert all(part[0] == cairocffi.PATH_LINE_TO for part in path[1:])
assert path[-1] == (cairocffi.PATH_LINE_TO, (100, 120))
context.new_path()
context.move_to(10, 20)
context.line_to(10, 30)
path = context.copy_path()
assert path == [
(cairocffi.PATH_MOVE_TO, (10, 20)),
(cairocffi.PATH_LINE_TO, (10, 30))]
additional_path = [(cairocffi.PATH_LINE_TO, (30, 150))]
context.append_path(additional_path)
assert context.copy_path() == path + additional_path
# Incorrect number of points:
with pytest.raises(ValueError):
context.append_path([(cairocffi.PATH_LINE_TO, (30, 150, 1))])
with pytest.raises(ValueError):
context.append_path([(cairocffi.PATH_LINE_TO, (30, 150, 1, 4))])
def test_context_properties():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 1, 1)
context = Context(surface)
assert context.get_antialias() == cairocffi.ANTIALIAS_DEFAULT
context.set_antialias(cairocffi.ANTIALIAS_BEST)
assert context.get_antialias() == cairocffi.ANTIALIAS_BEST
assert context.get_dash() == ([], 0)
context.set_dash([4, 1, 3, 2], 1.5)
assert context.get_dash() == ([4, 1, 3, 2], 1.5)
assert context.get_dash_count() == 4
assert context.get_fill_rule() == cairocffi.FILL_RULE_WINDING
context.set_fill_rule(cairocffi.FILL_RULE_EVEN_ODD)
assert context.get_fill_rule() == cairocffi.FILL_RULE_EVEN_ODD
assert context.get_line_cap() == cairocffi.LINE_CAP_BUTT
context.set_line_cap(cairocffi.LINE_CAP_SQUARE)
assert context.get_line_cap() == cairocffi.LINE_CAP_SQUARE
assert context.get_line_join() == cairocffi.LINE_JOIN_MITER
context.set_line_join(cairocffi.LINE_JOIN_ROUND)
assert context.get_line_join() == cairocffi.LINE_JOIN_ROUND
assert context.get_line_width() == 2
context.set_line_width(13)
assert context.get_line_width() == 13
assert context.get_miter_limit() == 10
context.set_miter_limit(4)
assert context.get_miter_limit() == 4
assert context.get_operator() == cairocffi.OPERATOR_OVER
context.set_operator(cairocffi.OPERATOR_XOR)
assert context.get_operator() == cairocffi.OPERATOR_XOR
assert context.get_tolerance() == 0.1
context.set_tolerance(0.25)
assert context.get_tolerance() == 0.25
def test_context_fill():
surface = ImageSurface(cairocffi.FORMAT_A8, 4, 4)
assert surface.get_data()[:] == b'\x00' * 16
context = Context(surface)
context.set_source_rgba(0, 0, 0, .5)
context.set_line_width(.5)
context.rectangle(1, 1, 2, 2)
assert context.fill_extents() == (1, 1, 3, 3)
assert context.stroke_extents() == (.75, .75, 3.25, 3.25)
assert context.in_fill(2, 2) is True
assert context.in_fill(.8, 2) is False
assert context.in_stroke(2, 2) is False
assert context.in_stroke(.8, 2) is True
path = list(context.copy_path())
assert path
context.fill_preserve()
assert list(context.copy_path()) == path
assert surface.get_data()[:] == (
b'\x00\x00\x00\x00'
b'\x00\x80\x80\x00'
b'\x00\x80\x80\x00'
b'\x00\x00\x00\x00'
)
context.fill()
assert list(context.copy_path()) == []
assert surface.get_data()[:] == (
b'\x00\x00\x00\x00'
b'\x00\xC0\xC0\x00'
b'\x00\xC0\xC0\x00'
b'\x00\x00\x00\x00'
)
def test_context_stroke():
for preserve in [True, False]:
surface = ImageSurface(cairocffi.FORMAT_A8, 4, 4)
assert surface.get_data()[:] == b'\x00' * 16
context = Context(surface)
context.set_source_rgba(0, 0, 0, 1)
context.set_line_width(1)
context.rectangle(.5, .5, 2, 2)
path = list(context.copy_path())
assert path
context.stroke_preserve() if preserve else context.stroke()
assert list(context.copy_path()) == (path if preserve else [])
assert surface.get_data()[:] == (
b'\xFF\xFF\xFF\x00'
b'\xFF\x00\xFF\x00'
b'\xFF\xFF\xFF\x00'
b'\x00\x00\x00\x00')
def test_context_clip():
surface = ImageSurface(cairocffi.FORMAT_A8, 4, 4)
assert surface.get_data()[:] == b'\x00' * 16
context = Context(surface)
context.rectangle(1, 1, 2, 2)
assert context.clip_extents() == (0, 0, 4, 4)
path = list(context.copy_path())
assert path
context.clip_preserve()
assert list(context.copy_path()) == path
assert context.clip_extents() == (1, 1, 3, 3)
context.clip()
assert list(context.copy_path()) == []
assert context.clip_extents() == (1, 1, 3, 3)
context.reset_clip()
assert context.clip_extents() == (0, 0, 4, 4)
context.rectangle(1, 1, 2, 2)
context.rectangle(1, 2, 1, 2)
context.clip()
assert context.copy_clip_rectangle_list() == [(1, 1, 2, 2), (1, 3, 1, 1)]
assert context.clip_extents() == (1, 1, 3, 4)
@pytest.mark.xfail(cairo_version() < 11000,
reason='Cairo version too low')
def test_context_in_clip():
surface = ImageSurface(cairocffi.FORMAT_A8, 4, 4)
context = Context(surface)
context.rectangle(1, 1, 2, 2)
assert context.in_clip(.5, 2) is True
assert context.in_clip(1.5, 2) is True
context.clip()
assert context.in_clip(.5, 2) is False
assert context.in_clip(1.5, 2) is True
def test_context_mask():
mask_surface = ImageSurface(cairocffi.FORMAT_ARGB32, 2, 2)
context = Context(mask_surface)
context.set_source_rgba(1, 0, .5, 1)
context.rectangle(0, 0, 1, 1)
context.fill()
context.set_source_rgba(1, .5, 1, .5)
context.rectangle(1, 1, 1, 1)
context.fill()
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 4, 4)
context = Context(surface)
context.mask(SurfacePattern(mask_surface))
o = pixel(b'\x00\x00\x00\x00')
b = pixel(b'\x80\x00\x00\x00')
B = pixel(b'\xFF\x00\x00\x00')
assert surface.get_data()[:] == (
B + o + o + o +
o + b + o + o +
o + o + o + o +
o + o + o + o
)
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 4, 4)
context = Context(surface)
context.mask_surface(mask_surface, surface_x=1, surface_y=2)
o = pixel(b'\x00\x00\x00\x00')
b = pixel(b'\x80\x00\x00\x00')
B = pixel(b'\xFF\x00\x00\x00')
assert surface.get_data()[:] == (
o + o + o + o +
o + o + o + o +
o + B + o + o +
o + o + b + o
)
def test_context_font():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 10, 10)
context = Context._from_pointer(Context(surface)._pointer, incref=True)
assert context.get_font_matrix().as_tuple() == (10, 0, 0, 10, 0, 0)
context.set_font_matrix(Matrix(2, 0, 0, 3, 12, 4))
assert context.get_font_matrix().as_tuple() == (2, 0, 0, 3, 12, 4)
context.set_font_size(14)
assert context.get_font_matrix().as_tuple() == (14, 0, 0, 14, 0, 0)
context.set_font_size(10)
context.select_font_face(b'@cairo:serif', cairocffi.FONT_SLANT_ITALIC)
font_face = context.get_font_face()
assert isinstance(font_face, ToyFontFace)
assert font_face.get_family() == '@cairo:serif'
assert font_face.get_slant() == cairocffi.FONT_SLANT_ITALIC
assert font_face.get_weight() == cairocffi.FONT_WEIGHT_NORMAL
try:
del cairocffi.fonts.FONT_TYPE_TO_CLASS[cairocffi.FONT_TYPE_TOY]
re_font_face = context.get_font_face()
assert re_font_face._pointer == font_face._pointer
assert isinstance(re_font_face, FontFace)
assert not isinstance(re_font_face, ToyFontFace)
finally:
cairocffi.fonts.FONT_TYPE_TO_CLASS[cairocffi.FONT_TYPE_TOY] = \
ToyFontFace
ascent, descent, height, max_x_advance, max_y_advance = (
context.font_extents())
# That’s about all we can assume for a default font.
assert max_x_advance > 0
assert max_y_advance == 0
_, _, _, _, x_advance, y_advance = context.text_extents('i' * 10)
assert x_advance > 0
assert y_advance == 0
context.set_font_face(
ToyFontFace('@cairo:monospace', weight=cairocffi.FONT_WEIGHT_BOLD))
_, _, _, _, x_advance_mono, y_advance = context.text_extents('i' * 10)
assert x_advance_mono > x_advance
assert y_advance == 0
assert list(context.copy_path()) == []
context.text_path('a')
assert list(context.copy_path())
assert surface.get_data()[:] == b'\x00' * 400
context.move_to(1, 9)
context.show_text('a')
assert surface.get_data()[:] != b'\x00' * 400
assert (context.get_font_options().get_hint_metrics() ==
cairocffi.HINT_METRICS_DEFAULT)
context.set_font_options(
FontOptions(hint_metrics=cairocffi.HINT_METRICS_ON))
assert (context.get_font_options().get_hint_metrics() ==
cairocffi.HINT_METRICS_ON)
assert (surface.get_font_options().get_hint_metrics() ==
cairocffi.HINT_METRICS_ON)
context.set_font_matrix(Matrix(2, 0, 0, 3, 12, 4))
assert context.get_scaled_font().get_font_matrix().as_tuple() == (
2, 0, 0, 3, 12, 4)
context.set_scaled_font(ScaledFont(ToyFontFace(), font_matrix=Matrix(
0, 1, 4, 0, 12, 4)))
assert context.get_font_matrix().as_tuple() == (0, 1, 4, 0, 12, 4)
# Reset the default
context.set_font_face(None)
# TODO: test this somehow.
def test_scaled_font():
font = ScaledFont(ToyFontFace())
font_extents = font.extents()
ascent, descent, height, max_x_advance, max_y_advance = font_extents
assert max_x_advance > 0
assert max_y_advance == 0
_, _, _, _, x_advance, y_advance = font.text_extents('i' * 10)
assert x_advance > 0
assert y_advance == 0
font = ScaledFont(ToyFontFace('@cairo:serif'))
_, _, _, _, x_advance, y_advance = font.text_extents('i' * 10)
font = ScaledFont(ToyFontFace('@cairo:monospace'))
_, _, _, _, x_advance_mono, y_advance = font.text_extents('i' * 10)
assert x_advance_mono > x_advance
assert y_advance == 0
# Not much we can test:
# The toy font face was "materialized" into a specific backend.
assert isinstance(font.get_font_face(), FontFace)
font = ScaledFont(
ToyFontFace('@cairo:monospace'),
Matrix(xx=20, yy=20), Matrix(xx=3, yy=.5),
FontOptions(antialias=cairocffi.ANTIALIAS_BEST))
assert font.get_font_options().get_antialias() == cairocffi.ANTIALIAS_BEST
assert font.get_font_matrix().as_tuple() == (20, 0, 0, 20, 0, 0)
assert font.get_ctm().as_tuple() == (3, 0, 0, .5, 0, 0)
assert font.get_scale_matrix().as_tuple() == (60, 0, 0, 10, 0, 0)
_, _, _, _, x_advance_mono_2, y_advance_2 = font.text_extents('i' * 10)
# Same yy as before:
assert y_advance == y_advance_2
# Bigger xx:
assert x_advance_mono_2 > x_advance_mono
def test_font_options():
options = FontOptions()
assert options.get_antialias() == cairocffi.ANTIALIAS_DEFAULT
options.set_antialias(cairocffi.ANTIALIAS_FAST)
assert options.get_antialias() == cairocffi.ANTIALIAS_FAST
assert options.get_subpixel_order() == cairocffi.SUBPIXEL_ORDER_DEFAULT
options.set_subpixel_order(cairocffi.SUBPIXEL_ORDER_BGR)
assert options.get_subpixel_order() == cairocffi.SUBPIXEL_ORDER_BGR
assert options.get_hint_style() == cairocffi.HINT_STYLE_DEFAULT
options.set_hint_style(cairocffi.HINT_STYLE_SLIGHT)
assert options.get_hint_style() == cairocffi.HINT_STYLE_SLIGHT
assert options.get_hint_metrics() == cairocffi.HINT_METRICS_DEFAULT
options.set_hint_metrics(cairocffi.HINT_METRICS_OFF)
assert options.get_hint_metrics() == cairocffi.HINT_METRICS_OFF
options_1 = FontOptions(hint_metrics=cairocffi.HINT_METRICS_ON)
assert options_1.get_hint_metrics() == cairocffi.HINT_METRICS_ON
    assert options_1.get_antialias() == cairocffi.ANTIALIAS_DEFAULT
options_2 = options_1.copy()
assert options_2 == options_1
assert len(set([options_1, options_2])) == 1 # test __hash__
options_2.set_antialias(cairocffi.ANTIALIAS_BEST)
assert options_2 != options_1
assert len(set([options_1, options_2])) == 2
options_1.merge(options_2)
assert options_2 == options_1
@pytest.mark.xfail(cairo_version() < 11512,
reason='Cairo version too low')
def test_font_options_variations():
options = FontOptions()
assert options.get_variations() is None
options.set_variations('wght 400, wdth 300')
assert options.get_variations() == 'wght 400, wdth 300'
options.set_variations(None)
assert options.get_variations() is None
def test_glyphs():
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 100, 20)
context = Context(surface)
font = context.get_scaled_font()
text = 'Étt'
glyphs, clusters, is_backwards = font.text_to_glyphs(
5, 15, text, with_clusters=True)
assert font.text_to_glyphs(5, 15, text, with_clusters=False) == glyphs
(idx1, x1, y1), (idx2, x2, y2), (idx3, x3, y3) = glyphs
assert idx1 != idx2 == idx3
assert y1 == y2 == y3 == 15
assert 5 == x1 < x2 < x3
assert clusters == [(2, 1), (1, 1), (1, 1)]
assert is_backwards == 0
assert round_tuple(font.glyph_extents(glyphs)) == (
round_tuple(font.text_extents(text)))
assert round_tuple(font.glyph_extents(glyphs)) == (
round_tuple(context.glyph_extents(glyphs)))
assert context.copy_path() == []
context.glyph_path(glyphs)
glyph_path = context.copy_path()
assert glyph_path
context.new_path()
assert context.copy_path() == []
context.move_to(10, 20) # Not the same coordinates as text_to_glyphs
context.text_path(text)
assert context.copy_path() != []
assert context.copy_path() != glyph_path
context.new_path()
assert context.copy_path() == []
context.move_to(5, 15)
context.text_path(text)
text_path = context.copy_path()
    # For some reason, paths end with a different element on old cairo.
assert text_path[:-1] == glyph_path[:-1]
empty = b'\x00' * 100 * 20 * 4
assert surface.get_data()[:] == empty
context.show_glyphs(glyphs)
glyph_pixels = surface.get_data()[:]
assert glyph_pixels != empty
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 100, 20)
context = Context(surface)
context.move_to(5, 15)
context.show_text_glyphs(text, glyphs, clusters, is_backwards)
text_glyphs_pixels = surface.get_data()[:]
assert glyph_pixels == text_glyphs_pixels
surface = ImageSurface(cairocffi.FORMAT_ARGB32, 100, 20)
context = Context(surface)
context.move_to(5, 15)
context.show_text(text)
text_pixels = surface.get_data()[:]
assert glyph_pixels == text_pixels
def test_from_null_pointer():
for class_ in [Surface, Context, Pattern, FontFace, ScaledFont]:
with pytest.raises(ValueError):
class_._from_pointer(cairocffi.ffi.NULL, 'unused')
|
smadhusu/AppRTC
|
refs/heads/master
|
src/third_party/apiclient/model.py
|
102
|
#!/usr/bin/python2.4
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model objects for requests and responses.
Each API may support one or more serializations, such
as JSON, Atom, etc. The model classes are responsible
for converting between the wire format and the Python
object representation.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import logging
import urllib
from apiclient import __version__
from errors import HttpError
from oauth2client.anyjson import simplejson
dump_request_response = False
def _abstract():
raise NotImplementedError('You need to override this function')
class Model(object):
"""Model base class.
All Model classes should implement this interface.
The Model serializes and de-serializes between a wire
format such as JSON and a Python object representation.
"""
def request(self, headers, path_params, query_params, body_value):
"""Updates outgoing requests with a serialized body.
Args:
headers: dict, request headers
path_params: dict, parameters that appear in the request path
query_params: dict, parameters that appear in the query
body_value: object, the request body as a Python object, which must be
serializable.
Returns:
A tuple of (headers, path_params, query, body)
headers: dict, request headers
path_params: dict, parameters that appear in the request path
query: string, query part of the request URI
body: string, the body serialized in the desired wire format.
"""
_abstract()
def response(self, resp, content):
"""Convert the response wire format into a Python object.
Args:
resp: httplib2.Response, the HTTP response headers and status
content: string, the body of the HTTP response
Returns:
The body de-serialized as a Python object.
Raises:
apiclient.errors.HttpError if a non 2xx response is received.
"""
_abstract()
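# A minimal sketch of a concrete Model, added for illustration; it is not
# part of the original module and "PassThroughModel" is a made-up name.
# It sends the request body unmodified and returns the response body as-is,
# the simplest contract that satisfies the interface above.
class PassThroughModel(Model):
  """Illustrative Model that performs no serialization at all."""
  def request(self, headers, path_params, query_params, body_value):
    # No query string and no content-type handling; the body goes out
    # exactly as the caller provided it.
    return (headers, path_params, '', body_value)
  def response(self, resp, content):
    # Hand the raw response body back to the caller untouched.
    return content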
class BaseModel(Model):
"""Base model class.
Subclasses should provide implementations for the "serialize" and
"deserialize" methods, as well as values for the following class attributes.
Attributes:
accept: The value to use for the HTTP Accept header.
content_type: The value to use for the HTTP Content-type header.
no_content_response: The value to return when deserializing a 204 "No
Content" response.
alt_param: The value to supply as the "alt" query parameter for requests.
"""
accept = None
content_type = None
no_content_response = None
alt_param = None
def _log_request(self, headers, path_params, query, body):
"""Logs debugging information about the request if requested."""
if dump_request_response:
logging.info('--request-start--')
logging.info('-headers-start-')
for h, v in headers.iteritems():
logging.info('%s: %s', h, v)
logging.info('-headers-end-')
logging.info('-path-parameters-start-')
for h, v in path_params.iteritems():
logging.info('%s: %s', h, v)
logging.info('-path-parameters-end-')
logging.info('body: %s', body)
logging.info('query: %s', query)
logging.info('--request-end--')
def request(self, headers, path_params, query_params, body_value):
"""Updates outgoing requests with a serialized body.
Args:
headers: dict, request headers
path_params: dict, parameters that appear in the request path
query_params: dict, parameters that appear in the query
body_value: object, the request body as a Python object, which must be
serializable by simplejson.
Returns:
A tuple of (headers, path_params, query, body)
headers: dict, request headers
path_params: dict, parameters that appear in the request path
query: string, query part of the request URI
body: string, the body serialized as JSON
"""
query = self._build_query(query_params)
headers['accept'] = self.accept
headers['accept-encoding'] = 'gzip, deflate'
if 'user-agent' in headers:
headers['user-agent'] += ' '
else:
headers['user-agent'] = ''
headers['user-agent'] += 'google-api-python-client/%s (gzip)' % __version__
if body_value is not None:
headers['content-type'] = self.content_type
body_value = self.serialize(body_value)
self._log_request(headers, path_params, query, body_value)
return (headers, path_params, query, body_value)
def _build_query(self, params):
"""Builds a query string.
Args:
params: dict, the query parameters
Returns:
The query parameters properly encoded into an HTTP URI query string.
"""
if self.alt_param is not None:
params.update({'alt': self.alt_param})
astuples = []
for key, value in params.iteritems():
if type(value) == type([]):
for x in value:
x = x.encode('utf-8')
astuples.append((key, x))
else:
if getattr(value, 'encode', False) and callable(value.encode):
value = value.encode('utf-8')
astuples.append((key, value))
return '?' + urllib.urlencode(astuples)
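  # Worked example (illustrative, not in the original source): with
  # alt_param = 'json', _build_query({'q': 'cats', 'maxResults': 5}) returns
  # something like '?q=cats&maxResults=5&alt=json'; parameter order follows
  # dict iteration order and is therefore not guaranteed.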
def _log_response(self, resp, content):
"""Logs debugging information about the response if requested."""
if dump_request_response:
logging.info('--response-start--')
for h, v in resp.iteritems():
logging.info('%s: %s', h, v)
if content:
logging.info(content)
logging.info('--response-end--')
def response(self, resp, content):
"""Convert the response wire format into a Python object.
Args:
resp: httplib2.Response, the HTTP response headers and status
content: string, the body of the HTTP response
Returns:
The body de-serialized as a Python object.
Raises:
apiclient.errors.HttpError if a non 2xx response is received.
"""
self._log_response(resp, content)
# Error handling is TBD, for example, do we retry
# for some operation/error combinations?
if resp.status < 300:
if resp.status == 204:
        # A 204: No Content response should be treated differently
        # from all the other success states
return self.no_content_response
return self.deserialize(content)
else:
logging.debug('Content from bad request was: %s' % content)
raise HttpError(resp, content)
def serialize(self, body_value):
"""Perform the actual Python object serialization.
Args:
body_value: object, the request body as a Python object.
Returns:
string, the body in serialized form.
"""
_abstract()
def deserialize(self, content):
"""Perform the actual deserialization from response string to Python
object.
Args:
content: string, the body of the HTTP response
Returns:
The body de-serialized as a Python object.
"""
_abstract()
class JsonModel(BaseModel):
"""Model class for JSON.
Serializes and de-serializes between JSON and the Python
object representation of HTTP request and response bodies.
"""
accept = 'application/json'
content_type = 'application/json'
alt_param = 'json'
def __init__(self, data_wrapper=False):
"""Construct a JsonModel.
Args:
data_wrapper: boolean, wrap requests and responses in a data wrapper
"""
self._data_wrapper = data_wrapper
def serialize(self, body_value):
if (isinstance(body_value, dict) and 'data' not in body_value and
self._data_wrapper):
body_value = {'data': body_value}
return simplejson.dumps(body_value)
def deserialize(self, content):
content = content.decode('utf-8')
body = simplejson.loads(content)
if self._data_wrapper and isinstance(body, dict) and 'data' in body:
body = body['data']
return body
@property
def no_content_response(self):
return {}
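# Illustrative round trip (not part of the original module):
#
#   model = JsonModel(data_wrapper=True)
#   model.serialize({'title': 'hello'})   # '{"data": {"title": "hello"}}'
#   model.deserialize('{"data": {"title": "hello"}}')  # {'title': 'hello'}
#
# With data_wrapper=False the body is dumped and loaded unchanged.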
class RawModel(JsonModel):
"""Model class for requests that don't return JSON.
Serializes and de-serializes between JSON and the Python
  object representation of the HTTP request, and returns the raw bytes
of the response body.
"""
accept = '*/*'
content_type = 'application/json'
alt_param = None
def deserialize(self, content):
return content
@property
def no_content_response(self):
return ''
class MediaModel(JsonModel):
"""Model class for requests that return Media.
Serializes and de-serializes between JSON and the Python
  object representation of the HTTP request, and returns the raw bytes
of the response body.
"""
accept = '*/*'
content_type = 'application/json'
alt_param = 'media'
def deserialize(self, content):
return content
@property
def no_content_response(self):
return ''
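# RawModel and MediaModel above differ only in the 'alt' query parameter
# they send (None vs. 'media'); both return the raw response bytes unchanged.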
class ProtocolBufferModel(BaseModel):
"""Model class for protocol buffers.
Serializes and de-serializes the binary protocol buffer sent in the HTTP
request and response bodies.
"""
accept = 'application/x-protobuf'
content_type = 'application/x-protobuf'
alt_param = 'proto'
def __init__(self, protocol_buffer):
"""Constructs a ProtocolBufferModel.
    The serialized protocol buffer returned in an HTTP response will be
de-serialized using the given protocol buffer class.
Args:
protocol_buffer: The protocol buffer class used to de-serialize a
response from the API.
"""
self._protocol_buffer = protocol_buffer
def serialize(self, body_value):
return body_value.SerializeToString()
def deserialize(self, content):
return self._protocol_buffer.FromString(content)
@property
def no_content_response(self):
return self._protocol_buffer()
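# Illustrative usage (not in the original source), assuming a generated
# protocol buffer class MyMessage:
#
#   model = ProtocolBufferModel(MyMessage)
#   wire_bytes = model.serialize(MyMessage(field='x'))
#   message = model.deserialize(wire_bytes)  # a MyMessage instance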
def makepatch(original, modified):
"""Create a patch object.
Some methods support PATCH, an efficient way to send updates to a resource.
This method allows the easy construction of patch bodies by looking at the
differences between a resource before and after it was modified.
Args:
original: object, the original deserialized resource
modified: object, the modified deserialized resource
Returns:
An object that contains only the changes from original to modified, in a
form suitable to pass to a PATCH method.
Example usage:
item = service.activities().get(postid=postid, userid=userid).execute()
original = copy.deepcopy(item)
item['object']['content'] = 'This is updated.'
    service.activities().patch(postid=postid, userid=userid,
body=makepatch(original, item)).execute()
"""
patch = {}
for key, original_value in original.iteritems():
modified_value = modified.get(key, None)
if modified_value is None:
# Use None to signal that the element is deleted
patch[key] = None
elif original_value != modified_value:
if type(original_value) == type({}):
# Recursively descend objects
patch[key] = makepatch(original_value, modified_value)
else:
# In the case of simple types or arrays we just replace
patch[key] = modified_value
else:
# Don't add anything to patch if there's no change
pass
for key in modified:
if key not in original:
patch[key] = modified[key]
return patch
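# Worked example for makepatch (illustrative, not in the original source):
#
#   original = {'a': 1, 'b': {'c': 2, 'd': 3}, 'e': 4}
#   modified = {'a': 1, 'b': {'c': 2, 'd': 5}, 'f': 6}
#   makepatch(original, modified)
#   # == {'b': {'d': 5}, 'e': None, 'f': 6}
#
# Unchanged keys are omitted, deleted keys map to None, and nested dicts
# are diffed recursively.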
|
eahneahn/free
|
refs/heads/master
|
lib/python2.7/site-packages/pip-1.5-py2.7.egg/pip/_vendor/requests/packages/charade/euctwfreq.py
|
3132
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851 / (1 - 0.74851) = 2.98
# Random Distribution Ratio = 512 / (5401 - 512) = 0.105
#
# The typical distribution ratio is about 25% of the ideal one, still much
# higher than the random distribution ratio
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
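# For illustration (not part of the original table): the constant above
# follows from the coverage figures listed earlier. If the 512 most
# frequent characters cover 74.851% of typical text, the ideal
# frequent-vs-rare ratio is 0.74851 / (1 - 0.74851) ~= 2.98, and the
# detection threshold is taken as roughly 25% of that ideal value,
# which gives the 0.75 used here.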
# Char to FreqOrder table
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = (
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
# Everything below is of no interest for detection purposes
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
# flake8: noqa
|
jlopex/kernel_linaro_snowball
|
refs/heads/mesh
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c.
The resulting trace is processed into a tree to produce a more
human-readable view of the call stack, drawn as a textual but
hierarchical tree of calls. Only the functions' names and the call
times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER enabled
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for a while, but not too long: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import re
import sys
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
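def _calltree_example():
    """ A minimal sketch, never called by the script itself, showing how
    CallTree is meant to be driven; main() at the bottom of this file does
    the same thing with lines parsed from stdin. The function names and
    timestamp here are hypothetical.
    """
    CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
    # path_walk has no known parent yet, so getParent attaches it to ROOT:
    tree = CallTree.ROOT.getParent("path_walk")
    # record that path_walk called path_put at the given timestamp:
    tree = tree.calls("path_put", "10152.583854")
    return CallTree.ROOT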
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match(r"[^]]+?\] +([0-9.]+): (\w+) <-(\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
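# Note: the regexp above expects lines shaped like
#            bash-4251  [001]  10152.583854: path_put <-path_walk
# (values illustrative only), and parseLine returns the tuple
# (calltime, callee, caller), here ("10152.583854", "path_put", "path_walk").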
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
Serag8/Bachelor
|
refs/heads/master
|
google_appengine/lib/jinja2-2.6/jinja2/testsuite/debug.py
|
114
|
# -*- coding: utf-8 -*-
"""
jinja2.testsuite.debug
~~~~~~~~~~~~~~~~~~~~~~
Tests the debug system.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import unittest
from jinja2.testsuite import JinjaTestCase, filesystem_loader
from jinja2 import Environment, TemplateSyntaxError
env = Environment(loader=filesystem_loader)
class DebugTestCase(JinjaTestCase):
if sys.version_info[:2] != (2, 4):
def test_runtime_error(self):
def test():
tmpl.render(fail=lambda: 1 / 0)
tmpl = env.get_template('broken.html')
self.assert_traceback_matches(test, r'''
File ".*?broken.html", line 2, in (top-level template code|<module>)
\{\{ fail\(\) \}\}
File ".*?debug.pyc?", line \d+, in <lambda>
tmpl\.render\(fail=lambda: 1 / 0\)
ZeroDivisionError: (int(eger)? )?division (or modulo )?by zero
''')
def test_syntax_error(self):
# XXX: the .*? is necessary for Python 3, which does not hide
# some of the stack frames we don't want to show. Not sure
# what's going on there, but it is not critical. Should
# be fixed eventually, though.
self.assert_traceback_matches(lambda: env.get_template('syntaxerror.html'), r'''(?sm)
File ".*?syntaxerror.html", line 4, in (template|<module>)
\{% endif %\}.*?
(jinja2\.exceptions\.)?TemplateSyntaxError: Encountered unknown tag 'endif'. Jinja was looking for the following tags: 'endfor' or 'else'. The innermost block that needs to be closed is 'for'.
''')
def test_regular_syntax_error(self):
def test():
raise TemplateSyntaxError('wtf', 42)
self.assert_traceback_matches(test, r'''
File ".*debug.pyc?", line \d+, in test
raise TemplateSyntaxError\('wtf', 42\)
(jinja2\.exceptions\.)?TemplateSyntaxError: wtf
line 42''')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DebugTestCase))
return suite
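# A small convenience, not part of the upstream testsuite runner (which
# collects suite() itself): allow running this module's tests directly.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')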
|
hjanime/VisTrails
|
refs/heads/master
|
vistrails/db/versions/v1_0_1/domain/auto_gen.py
|
1
|
###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""generated automatically by auto_dao.py"""
from __future__ import division
import copy
class DBOpmProcessIdEffect(object):
vtType = 'opm_process_id_effect'
def __init__(self, id=None):
self._db_id = id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmProcessIdEffect.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmProcessIdEffect(id=self._db_id)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('opm_process', self._db_id) in id_remap:
cp._db_id = id_remap[('opm_process', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmProcessIdEffect()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
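def _example_do_copy(obj):
    """A minimal, hypothetical sketch (not used by VisTrails itself) of the
    do_copy/new_ids protocol shared by every class in this module: id_scope
    hands out fresh ids per vtType, and id_remap records (type, old_id) ->
    new_id pairs so cross-references such as the 'opm_process' id above can
    be patched afterwards.
    """
    class _IdScope(object):
        # stand-in for the real IdScope object VisTrails passes in
        remap = {}
        def __init__(self):
            self._next_id = 0
        def getNewId(self, vt_type):
            self._next_id += 1
            return self._next_id
    id_remap = {}
    return obj.do_copy(new_ids=True, id_scope=_IdScope(), id_remap=id_remap)
# e.g. _example_do_copy(DBOpmProcessIdEffect(id=7)).db_id == 1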
class DBOpmWasGeneratedBy(object):
vtType = 'opm_was_generated_by'
def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
self.db_deleted_effect = []
self._db_effect = effect
self.db_deleted_role = []
self._db_role = role
self.db_deleted_cause = []
self._db_cause = cause
self.db_deleted_accounts = []
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
self.db_deleted_opm_times = []
if opm_times is None:
self._db_opm_times = []
else:
self._db_opm_times = opm_times
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmWasGeneratedBy.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmWasGeneratedBy()
if self._db_effect is not None:
cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
if self._db_role is not None:
cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
if self._db_cause is not None:
cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
if self._db_opm_times is None:
cp._db_opm_times = []
else:
cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmWasGeneratedBy()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'effect' in class_dict:
res = class_dict['effect'](old_obj, trans_dict)
new_obj.db_effect = res
elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
obj = old_obj.db_effect
new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
for obj in old_obj.db_deleted_effect:
n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict)
new_obj.db_deleted_effect.append(n_obj)
if 'role' in class_dict:
res = class_dict['role'](old_obj, trans_dict)
new_obj.db_role = res
elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
obj = old_obj.db_role
new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
for obj in old_obj.db_deleted_role:
n_obj = DBOpmRole.update_version(obj, trans_dict)
new_obj.db_deleted_role.append(n_obj)
if 'cause' in class_dict:
res = class_dict['cause'](old_obj, trans_dict)
new_obj.db_cause = res
elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
obj = old_obj.db_cause
new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
for obj in old_obj.db_deleted_cause:
n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict)
new_obj.db_deleted_cause.append(n_obj)
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
if 'opm_times' in class_dict:
res = class_dict['opm_times'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_opm_time(obj)
elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
for obj in old_obj.db_opm_times:
new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
for obj in old_obj.db_deleted_opm_times:
n_obj = DBOpmTime.update_version(obj, trans_dict)
new_obj.db_deleted_opm_times.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_effect is not None:
children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_effect = None
if self._db_role is not None:
children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_role = None
if self._db_cause is not None:
children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_cause = None
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
to_del = []
for child in self.db_opm_times:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_opm_time(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_effect)
children.extend(self.db_deleted_role)
children.extend(self.db_deleted_cause)
children.extend(self.db_deleted_accounts)
children.extend(self.db_deleted_opm_times)
if remove:
self.db_deleted_effect = []
self.db_deleted_role = []
self.db_deleted_cause = []
self.db_deleted_accounts = []
self.db_deleted_opm_times = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_effect is not None and self._db_effect.has_changes():
return True
if self._db_role is not None and self._db_role.has_changes():
return True
if self._db_cause is not None and self._db_cause.has_changes():
return True
for child in self._db_accounts:
if child.has_changes():
return True
for child in self._db_opm_times:
if child.has_changes():
return True
return False
def __get_db_effect(self):
return self._db_effect
def __set_db_effect(self, effect):
self._db_effect = effect
self.is_dirty = True
db_effect = property(__get_db_effect, __set_db_effect)
def db_add_effect(self, effect):
self._db_effect = effect
def db_change_effect(self, effect):
self._db_effect = effect
def db_delete_effect(self, effect):
if not self.is_new:
self.db_deleted_effect.append(self._db_effect)
self._db_effect = None
def __get_db_role(self):
return self._db_role
def __set_db_role(self, role):
self._db_role = role
self.is_dirty = True
db_role = property(__get_db_role, __set_db_role)
def db_add_role(self, role):
self._db_role = role
def db_change_role(self, role):
self._db_role = role
def db_delete_role(self, role):
if not self.is_new:
self.db_deleted_role.append(self._db_role)
self._db_role = None
def __get_db_cause(self):
return self._db_cause
def __set_db_cause(self, cause):
self._db_cause = cause
self.is_dirty = True
db_cause = property(__get_db_cause, __set_db_cause)
def db_add_cause(self, cause):
self._db_cause = cause
def db_change_cause(self, cause):
self._db_cause = cause
def db_delete_cause(self, cause):
if not self.is_new:
self.db_deleted_cause.append(self._db_cause)
self._db_cause = None
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_change_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_delete_account(self, account):
self.is_dirty = True
raise Exception('Cannot delete a non-keyed object')
def db_get_account(self, key):
return None
def __get_db_opm_times(self):
return self._db_opm_times
def __set_db_opm_times(self, opm_times):
self._db_opm_times = opm_times
self.is_dirty = True
db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
def db_get_opm_times(self):
return self._db_opm_times
def db_add_opm_time(self, opm_time):
self.is_dirty = True
self._db_opm_times.append(opm_time)
def db_change_opm_time(self, opm_time):
self.is_dirty = True
self._db_opm_times.append(opm_time)
def db_delete_opm_time(self, opm_time):
self.is_dirty = True
raise Exception('Cannot delete a non-keyed object')
def db_get_opm_time(self, key):
return None
class DBOpmAccounts(object):
vtType = 'opm_accounts'
def __init__(self, accounts=None, opm_overlapss=None):
self.db_deleted_accounts = []
self.db_accounts_id_index = {}
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
for v in self._db_accounts:
self.db_accounts_id_index[v.db_id] = v
self.db_deleted_opm_overlapss = []
if opm_overlapss is None:
self._db_opm_overlapss = []
else:
self._db_opm_overlapss = opm_overlapss
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmAccounts.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmAccounts()
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
if self._db_opm_overlapss is None:
cp._db_opm_overlapss = []
else:
cp._db_opm_overlapss = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_overlapss]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_accounts_id_index = dict((v.db_id, v) for v in cp._db_accounts)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmAccounts()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccount.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccount.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
if 'opm_overlapss' in class_dict:
res = class_dict['opm_overlapss'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_opm_overlaps(obj)
elif hasattr(old_obj, 'db_opm_overlapss') and old_obj.db_opm_overlapss is not None:
for obj in old_obj.db_opm_overlapss:
new_obj.db_add_opm_overlaps(DBOpmOverlaps.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_opm_overlapss') and hasattr(new_obj, 'db_deleted_opm_overlapss'):
for obj in old_obj.db_deleted_opm_overlapss:
n_obj = DBOpmOverlaps.update_version(obj, trans_dict)
new_obj.db_deleted_opm_overlapss.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
to_del = []
for child in self.db_opm_overlapss:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_opm_overlaps(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_accounts)
children.extend(self.db_deleted_opm_overlapss)
if remove:
self.db_deleted_accounts = []
self.db_deleted_opm_overlapss = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_accounts:
if child.has_changes():
return True
for child in self._db_opm_overlapss:
if child.has_changes():
return True
return False
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
self.db_accounts_id_index[account.db_id] = account
def db_change_account(self, account):
self.is_dirty = True
found = False
for i in xrange(len(self._db_accounts)):
if self._db_accounts[i].db_id == account.db_id:
self._db_accounts[i] = account
found = True
break
if not found:
self._db_accounts.append(account)
self.db_accounts_id_index[account.db_id] = account
def db_delete_account(self, account):
self.is_dirty = True
for i in xrange(len(self._db_accounts)):
if self._db_accounts[i].db_id == account.db_id:
if not self._db_accounts[i].is_new:
self.db_deleted_accounts.append(self._db_accounts[i])
del self._db_accounts[i]
break
del self.db_accounts_id_index[account.db_id]
def db_get_account(self, key):
for i in xrange(len(self._db_accounts)):
if self._db_accounts[i].db_id == key:
return self._db_accounts[i]
return None
def db_get_account_by_id(self, key):
return self.db_accounts_id_index[key]
def db_has_account_with_id(self, key):
return key in self.db_accounts_id_index
def __get_db_opm_overlapss(self):
return self._db_opm_overlapss
def __set_db_opm_overlapss(self, opm_overlapss):
self._db_opm_overlapss = opm_overlapss
self.is_dirty = True
db_opm_overlapss = property(__get_db_opm_overlapss, __set_db_opm_overlapss)
def db_get_opm_overlapss(self):
return self._db_opm_overlapss
def db_add_opm_overlaps(self, opm_overlaps):
self.is_dirty = True
self._db_opm_overlapss.append(opm_overlaps)
def db_change_opm_overlaps(self, opm_overlaps):
self.is_dirty = True
self._db_opm_overlapss.append(opm_overlaps)
def db_delete_opm_overlaps(self, opm_overlaps):
self.is_dirty = True
raise Exception('Cannot delete a non-keyed object')
def db_get_opm_overlaps(self, key):
return None
class DBPortSpec(object):
vtType = 'portSpec'
def __init__(self, id=None, name=None, type=None, optional=None, sort_key=None, sigstring=None, labels=None, defaults=None):
self._db_id = id
self._db_name = name
self._db_type = type
self._db_optional = optional
self._db_sort_key = sort_key
self._db_sigstring = sigstring
self._db_labels = labels
self._db_defaults = defaults
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPortSpec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPortSpec(id=self._db_id,
name=self._db_name,
type=self._db_type,
optional=self._db_optional,
sort_key=self._db_sort_key,
sigstring=self._db_sigstring,
labels=self._db_labels,
defaults=self._db_defaults)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPortSpec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'type' in class_dict:
res = class_dict['type'](old_obj, trans_dict)
new_obj.db_type = res
elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
new_obj.db_type = old_obj.db_type
if 'optional' in class_dict:
res = class_dict['optional'](old_obj, trans_dict)
new_obj.db_optional = res
elif hasattr(old_obj, 'db_optional') and old_obj.db_optional is not None:
new_obj.db_optional = old_obj.db_optional
if 'sort_key' in class_dict:
res = class_dict['sort_key'](old_obj, trans_dict)
new_obj.db_sort_key = res
elif hasattr(old_obj, 'db_sort_key') and old_obj.db_sort_key is not None:
new_obj.db_sort_key = old_obj.db_sort_key
if 'sigstring' in class_dict:
res = class_dict['sigstring'](old_obj, trans_dict)
new_obj.db_sigstring = res
elif hasattr(old_obj, 'db_sigstring') and old_obj.db_sigstring is not None:
new_obj.db_sigstring = old_obj.db_sigstring
if 'labels' in class_dict:
res = class_dict['labels'](old_obj, trans_dict)
new_obj.db_labels = res
elif hasattr(old_obj, 'db_labels') and old_obj.db_labels is not None:
new_obj.db_labels = old_obj.db_labels
if 'defaults' in class_dict:
res = class_dict['defaults'](old_obj, trans_dict)
new_obj.db_defaults = res
elif hasattr(old_obj, 'db_defaults') and old_obj.db_defaults is not None:
new_obj.db_defaults = old_obj.db_defaults
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_type(self):
return self._db_type
def __set_db_type(self, type):
self._db_type = type
self.is_dirty = True
db_type = property(__get_db_type, __set_db_type)
def db_add_type(self, type):
self._db_type = type
def db_change_type(self, type):
self._db_type = type
def db_delete_type(self, type):
self._db_type = None
def __get_db_optional(self):
return self._db_optional
def __set_db_optional(self, optional):
self._db_optional = optional
self.is_dirty = True
db_optional = property(__get_db_optional, __set_db_optional)
def db_add_optional(self, optional):
self._db_optional = optional
def db_change_optional(self, optional):
self._db_optional = optional
def db_delete_optional(self, optional):
self._db_optional = None
def __get_db_sort_key(self):
return self._db_sort_key
def __set_db_sort_key(self, sort_key):
self._db_sort_key = sort_key
self.is_dirty = True
db_sort_key = property(__get_db_sort_key, __set_db_sort_key)
def db_add_sort_key(self, sort_key):
self._db_sort_key = sort_key
def db_change_sort_key(self, sort_key):
self._db_sort_key = sort_key
def db_delete_sort_key(self, sort_key):
self._db_sort_key = None
def __get_db_sigstring(self):
return self._db_sigstring
def __set_db_sigstring(self, sigstring):
self._db_sigstring = sigstring
self.is_dirty = True
db_sigstring = property(__get_db_sigstring, __set_db_sigstring)
def db_add_sigstring(self, sigstring):
self._db_sigstring = sigstring
def db_change_sigstring(self, sigstring):
self._db_sigstring = sigstring
def db_delete_sigstring(self, sigstring):
self._db_sigstring = None
def __get_db_labels(self):
return self._db_labels
def __set_db_labels(self, labels):
self._db_labels = labels
self.is_dirty = True
db_labels = property(__get_db_labels, __set_db_labels)
def db_add_labels(self, labels):
self._db_labels = labels
def db_change_labels(self, labels):
self._db_labels = labels
def db_delete_labels(self, labels):
self._db_labels = None
def __get_db_defaults(self):
return self._db_defaults
def __set_db_defaults(self, defaults):
self._db_defaults = defaults
self.is_dirty = True
db_defaults = property(__get_db_defaults, __set_db_defaults)
def db_add_defaults(self, defaults):
self._db_defaults = defaults
def db_change_defaults(self, defaults):
self._db_defaults = defaults
def db_delete_defaults(self, defaults):
self._db_defaults = None
def getPrimaryKey(self):
return self._db_id
class DBModule(object):
vtType = 'module'
def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, tag=None, location=None, functions=None, annotations=None, portSpecs=None):
self._db_id = id
self._db_cache = cache
self._db_name = name
self._db_namespace = namespace
self._db_package = package
self._db_version = version
self._db_tag = tag
self.db_deleted_location = []
self._db_location = location
self.db_deleted_functions = []
self.db_functions_id_index = {}
if functions is None:
self._db_functions = []
else:
self._db_functions = functions
for v in self._db_functions:
self.db_functions_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.db_deleted_portSpecs = []
self.db_portSpecs_id_index = {}
self.db_portSpecs_name_index = {}
if portSpecs is None:
self._db_portSpecs = []
else:
self._db_portSpecs = portSpecs
for v in self._db_portSpecs:
self.db_portSpecs_id_index[v.db_id] = v
self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBModule.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBModule(id=self._db_id,
cache=self._db_cache,
name=self._db_name,
namespace=self._db_namespace,
package=self._db_package,
version=self._db_version,
tag=self._db_tag)
if self._db_location is not None:
cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
if self._db_functions is None:
cp._db_functions = []
else:
cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_portSpecs is None:
cp._db_portSpecs = []
else:
cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs)
cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBModule()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'cache' in class_dict:
res = class_dict['cache'](old_obj, trans_dict)
new_obj.db_cache = res
elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
new_obj.db_cache = old_obj.db_cache
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'namespace' in class_dict:
res = class_dict['namespace'](old_obj, trans_dict)
new_obj.db_namespace = res
elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
new_obj.db_namespace = old_obj.db_namespace
if 'package' in class_dict:
res = class_dict['package'](old_obj, trans_dict)
new_obj.db_package = res
elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
new_obj.db_package = old_obj.db_package
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'tag' in class_dict:
res = class_dict['tag'](old_obj, trans_dict)
new_obj.db_tag = res
elif hasattr(old_obj, 'db_tag') and old_obj.db_tag is not None:
new_obj.db_tag = old_obj.db_tag
if 'location' in class_dict:
res = class_dict['location'](old_obj, trans_dict)
new_obj.db_location = res
elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
obj = old_obj.db_location
new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
for obj in old_obj.db_deleted_location:
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_location.append(n_obj)
if 'functions' in class_dict:
res = class_dict['functions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_function(obj)
elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
for obj in old_obj.db_functions:
new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
for obj in old_obj.db_deleted_functions:
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_functions.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'portSpecs' in class_dict:
res = class_dict['portSpecs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_portSpec(obj)
elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None:
for obj in old_obj.db_portSpecs:
new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'):
for obj in old_obj.db_deleted_portSpecs:
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_portSpecs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_location is not None:
children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_location = None
to_del = []
for child in self.db_functions:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_function(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_portSpecs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_portSpec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_location)
children.extend(self.db_deleted_functions)
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_portSpecs)
if remove:
self.db_deleted_location = []
self.db_deleted_functions = []
self.db_deleted_annotations = []
self.db_deleted_portSpecs = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_location is not None and self._db_location.has_changes():
return True
for child in self._db_functions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_portSpecs:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_cache(self):
return self._db_cache
def __set_db_cache(self, cache):
self._db_cache = cache
self.is_dirty = True
db_cache = property(__get_db_cache, __set_db_cache)
def db_add_cache(self, cache):
self._db_cache = cache
def db_change_cache(self, cache):
self._db_cache = cache
def db_delete_cache(self, cache):
self._db_cache = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_namespace(self):
return self._db_namespace
def __set_db_namespace(self, namespace):
self._db_namespace = namespace
self.is_dirty = True
db_namespace = property(__get_db_namespace, __set_db_namespace)
def db_add_namespace(self, namespace):
self._db_namespace = namespace
def db_change_namespace(self, namespace):
self._db_namespace = namespace
def db_delete_namespace(self, namespace):
self._db_namespace = None
def __get_db_package(self):
return self._db_package
def __set_db_package(self, package):
self._db_package = package
self.is_dirty = True
db_package = property(__get_db_package, __set_db_package)
def db_add_package(self, package):
self._db_package = package
def db_change_package(self, package):
self._db_package = package
def db_delete_package(self, package):
self._db_package = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_tag(self):
return self._db_tag
def __set_db_tag(self, tag):
self._db_tag = tag
self.is_dirty = True
db_tag = property(__get_db_tag, __set_db_tag)
def db_add_tag(self, tag):
self._db_tag = tag
def db_change_tag(self, tag):
self._db_tag = tag
def db_delete_tag(self, tag):
self._db_tag = None
def __get_db_location(self):
return self._db_location
def __set_db_location(self, location):
self._db_location = location
self.is_dirty = True
db_location = property(__get_db_location, __set_db_location)
def db_add_location(self, location):
self._db_location = location
def db_change_location(self, location):
self._db_location = location
def db_delete_location(self, location):
if not self.is_new:
self.db_deleted_location.append(self._db_location)
self._db_location = None
def __get_db_functions(self):
return self._db_functions
def __set_db_functions(self, functions):
self._db_functions = functions
self.is_dirty = True
db_functions = property(__get_db_functions, __set_db_functions)
def db_get_functions(self):
return self._db_functions
def db_add_function(self, function):
self.is_dirty = True
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_change_function(self, function):
self.is_dirty = True
found = False
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
self._db_functions[i] = function
found = True
break
if not found:
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_delete_function(self, function):
self.is_dirty = True
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
if not self._db_functions[i].is_new:
self.db_deleted_functions.append(self._db_functions[i])
del self._db_functions[i]
break
del self.db_functions_id_index[function.db_id]
def db_get_function(self, key):
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == key:
return self._db_functions[i]
return None
def db_get_function_by_id(self, key):
return self.db_functions_id_index[key]
def db_has_function_with_id(self, key):
return key in self.db_functions_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def __get_db_portSpecs(self):
return self._db_portSpecs
def __set_db_portSpecs(self, portSpecs):
self._db_portSpecs = portSpecs
self.is_dirty = True
db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs)
def db_get_portSpecs(self):
return self._db_portSpecs
def db_add_portSpec(self, portSpec):
self.is_dirty = True
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_change_portSpec(self, portSpec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
self._db_portSpecs[i] = portSpec
found = True
break
if not found:
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_delete_portSpec(self, portSpec):
self.is_dirty = True
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
if not self._db_portSpecs[i].is_new:
self.db_deleted_portSpecs.append(self._db_portSpecs[i])
del self._db_portSpecs[i]
break
del self.db_portSpecs_id_index[portSpec.db_id]
del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)]
def db_get_portSpec(self, key):
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == key:
return self._db_portSpecs[i]
return None
def db_get_portSpec_by_id(self, key):
return self.db_portSpecs_id_index[key]
def db_has_portSpec_with_id(self, key):
return key in self.db_portSpecs_id_index
def db_get_portSpec_by_name(self, key):
return self.db_portSpecs_name_index[key]
def db_has_portSpec_with_name(self, key):
return key in self.db_portSpecs_name_index
def getPrimaryKey(self):
return self._db_id
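def _example_module_indices():
    """A hypothetical sketch (not part of the generated API) of the indexed
    child-collection pattern DBModule uses: db_add_/db_delete_ keep the id
    and (name, type) indices in sync with the backing list, so lookups stay
    O(1).
    """
    module = DBModule(id=1, name='MyModule')
    spec = DBPortSpec(id=10, name='value', type='input')
    module.db_add_portSpec(spec)
    assert module.db_has_portSpec_with_id(10)
    assert module.db_get_portSpec_by_name(('value', 'input')) is spec
    module.db_delete_portSpec(spec)
    assert not module.db_has_portSpec_with_id(10)
    return module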
class DBModuleDescriptor(object):
vtType = 'module_descriptor'
def __init__(self, id=None, name=None, package=None, namespace=None, package_version=None, version=None, base_descriptor_id=None, portSpecs=None):
self._db_id = id
self._db_name = name
self._db_package = package
self._db_namespace = namespace
self._db_package_version = package_version
self._db_version = version
self._db_base_descriptor_id = base_descriptor_id
self.db_deleted_portSpecs = []
self.db_portSpecs_id_index = {}
self.db_portSpecs_name_index = {}
if portSpecs is None:
self._db_portSpecs = []
else:
self._db_portSpecs = portSpecs
for v in self._db_portSpecs:
self.db_portSpecs_id_index[v.db_id] = v
self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBModuleDescriptor.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBModuleDescriptor(id=self._db_id,
name=self._db_name,
package=self._db_package,
namespace=self._db_namespace,
package_version=self._db_package_version,
version=self._db_version,
base_descriptor_id=self._db_base_descriptor_id)
if self._db_portSpecs is None:
cp._db_portSpecs = []
else:
cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_base_descriptor_id') and ('module_descriptor', self._db_base_descriptor_id) in id_remap:
cp._db_base_descriptor_id = id_remap[('module_descriptor', self._db_base_descriptor_id)]
# recreate indices and set flags
cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs)
cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBModuleDescriptor()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'package' in class_dict:
res = class_dict['package'](old_obj, trans_dict)
new_obj.db_package = res
elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
new_obj.db_package = old_obj.db_package
if 'namespace' in class_dict:
res = class_dict['namespace'](old_obj, trans_dict)
new_obj.db_namespace = res
elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
new_obj.db_namespace = old_obj.db_namespace
if 'package_version' in class_dict:
res = class_dict['package_version'](old_obj, trans_dict)
new_obj.db_package_version = res
elif hasattr(old_obj, 'db_package_version') and old_obj.db_package_version is not None:
new_obj.db_package_version = old_obj.db_package_version
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'base_descriptor_id' in class_dict:
res = class_dict['base_descriptor_id'](old_obj, trans_dict)
new_obj.db_base_descriptor_id = res
elif hasattr(old_obj, 'db_base_descriptor_id') and old_obj.db_base_descriptor_id is not None:
new_obj.db_base_descriptor_id = old_obj.db_base_descriptor_id
if 'portSpecs' in class_dict:
res = class_dict['portSpecs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_portSpec(obj)
elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None:
for obj in old_obj.db_portSpecs:
new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'):
for obj in old_obj.db_deleted_portSpecs:
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_portSpecs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_portSpecs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_portSpec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_portSpecs)
if remove:
self.db_deleted_portSpecs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_portSpecs:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_package(self):
return self._db_package
def __set_db_package(self, package):
self._db_package = package
self.is_dirty = True
db_package = property(__get_db_package, __set_db_package)
def db_add_package(self, package):
self._db_package = package
def db_change_package(self, package):
self._db_package = package
def db_delete_package(self, package):
self._db_package = None
def __get_db_namespace(self):
return self._db_namespace
def __set_db_namespace(self, namespace):
self._db_namespace = namespace
self.is_dirty = True
db_namespace = property(__get_db_namespace, __set_db_namespace)
def db_add_namespace(self, namespace):
self._db_namespace = namespace
def db_change_namespace(self, namespace):
self._db_namespace = namespace
def db_delete_namespace(self, namespace):
self._db_namespace = None
def __get_db_package_version(self):
return self._db_package_version
def __set_db_package_version(self, package_version):
self._db_package_version = package_version
self.is_dirty = True
db_package_version = property(__get_db_package_version, __set_db_package_version)
def db_add_package_version(self, package_version):
self._db_package_version = package_version
def db_change_package_version(self, package_version):
self._db_package_version = package_version
def db_delete_package_version(self, package_version):
self._db_package_version = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_base_descriptor_id(self):
return self._db_base_descriptor_id
def __set_db_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = base_descriptor_id
self.is_dirty = True
db_base_descriptor_id = property(__get_db_base_descriptor_id, __set_db_base_descriptor_id)
def db_add_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = base_descriptor_id
def db_change_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = base_descriptor_id
def db_delete_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = None
def __get_db_portSpecs(self):
return self._db_portSpecs
def __set_db_portSpecs(self, portSpecs):
self._db_portSpecs = portSpecs
self.is_dirty = True
db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs)
def db_get_portSpecs(self):
return self._db_portSpecs
def db_add_portSpec(self, portSpec):
self.is_dirty = True
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_change_portSpec(self, portSpec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
self._db_portSpecs[i] = portSpec
found = True
break
if not found:
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_delete_portSpec(self, portSpec):
self.is_dirty = True
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
if not self._db_portSpecs[i].is_new:
self.db_deleted_portSpecs.append(self._db_portSpecs[i])
del self._db_portSpecs[i]
break
del self.db_portSpecs_id_index[portSpec.db_id]
del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)]
def db_get_portSpec(self, key):
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == key:
return self._db_portSpecs[i]
return None
def db_get_portSpec_by_id(self, key):
return self.db_portSpecs_id_index[key]
def db_has_portSpec_with_id(self, key):
return key in self.db_portSpecs_id_index
def db_get_portSpec_by_name(self, key):
return self.db_portSpecs_name_index[key]
def db_has_portSpec_with_name(self, key):
return key in self.db_portSpecs_name_index
def getPrimaryKey(self):
return self._db_id
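# DBTag: a simple (id, name) label.  Note that do_copy remaps the tag id
# against the 'action' id space, so tag ids track the actions they name.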
class DBTag(object):
vtType = 'tag'
def __init__(self, id=None, name=None):
self._db_id = id
self._db_name = name
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBTag.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBTag(id=self._db_id,
name=self._db_name)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('action', self._db_id) in id_remap:
cp._db_id = id_remap[('action', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBTag()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def getPrimaryKey(self):
return self._db_id
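# DBOpmRole: an OPM role, carrying a single 'value' string and no id of
# its own.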
class DBOpmRole(object):
vtType = 'opm_role'
def __init__(self, value=None):
self._db_value = value
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmRole.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmRole(value=self._db_value)
# set new ids
        # DBOpmRole defines no id field; guard the remap so that requesting
        # new ids does not raise AttributeError on self.db_id.
        if new_ids and hasattr(self, 'db_id'):
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmRole()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
new_obj.db_value = old_obj.db_value
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
self._db_value = None
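# DBOpmProcesses: the OPM process collection.  'processs' is the
# generator's mechanical pluralization of 'process' and is kept throughout
# (including the db_processs_id_index name) for compatibility.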
class DBOpmProcesses(object):
vtType = 'opm_processes'
def __init__(self, processs=None):
self.db_deleted_processs = []
self.db_processs_id_index = {}
if processs is None:
self._db_processs = []
else:
self._db_processs = processs
for v in self._db_processs:
self.db_processs_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmProcesses.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmProcesses()
if self._db_processs is None:
cp._db_processs = []
else:
cp._db_processs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_processs]
# set new ids
        # DBOpmProcesses defines no id field; guard the remap so that
        # requesting new ids does not raise AttributeError on self.db_id.
        if new_ids and hasattr(self, 'db_id'):
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
# recreate indices and set flags
cp.db_processs_id_index = dict((v.db_id, v) for v in cp._db_processs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmProcesses()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'processs' in class_dict:
res = class_dict['processs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_process(obj)
elif hasattr(old_obj, 'db_processs') and old_obj.db_processs is not None:
for obj in old_obj.db_processs:
new_obj.db_add_process(DBOpmProcess.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_processs') and hasattr(new_obj, 'db_deleted_processs'):
for obj in old_obj.db_deleted_processs:
n_obj = DBOpmProcess.update_version(obj, trans_dict)
new_obj.db_deleted_processs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_processs:
            # DBOpmProcesses has no id field of its own; use None as the
            # parent id rather than raising AttributeError on self.db_id
            children.extend(child.db_children((self.vtType, getattr(self, 'db_id', None)), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_process(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_processs)
if remove:
self.db_deleted_processs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_processs:
if child.has_changes():
return True
return False
def __get_db_processs(self):
return self._db_processs
def __set_db_processs(self, processs):
self._db_processs = processs
self.is_dirty = True
db_processs = property(__get_db_processs, __set_db_processs)
def db_get_processs(self):
return self._db_processs
def db_add_process(self, process):
self.is_dirty = True
self._db_processs.append(process)
self.db_processs_id_index[process.db_id] = process
def db_change_process(self, process):
self.is_dirty = True
found = False
for i in xrange(len(self._db_processs)):
if self._db_processs[i].db_id == process.db_id:
self._db_processs[i] = process
found = True
break
if not found:
self._db_processs.append(process)
self.db_processs_id_index[process.db_id] = process
def db_delete_process(self, process):
self.is_dirty = True
for i in xrange(len(self._db_processs)):
if self._db_processs[i].db_id == process.db_id:
if not self._db_processs[i].is_new:
self.db_deleted_processs.append(self._db_processs[i])
del self._db_processs[i]
break
del self.db_processs_id_index[process.db_id]
def db_get_process(self, key):
for i in xrange(len(self._db_processs)):
if self._db_processs[i].db_id == key:
return self._db_processs[i]
return None
def db_get_process_by_id(self, key):
return self.db_processs_id_index[key]
def db_has_process_with_id(self, key):
return key in self.db_processs_id_index
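# DBOpmAccountId: a reference to an OPM account; holds only the id, which
# do_copy remaps against the 'opm_account' id space.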
class DBOpmAccountId(object):
vtType = 'opm_account_id'
def __init__(self, id=None):
self._db_id = id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmAccountId.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmAccountId(id=self._db_id)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('opm_account', self._db_id) in id_remap:
cp._db_id = id_remap[('opm_account', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmAccountId()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
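# DBPort: a connection endpoint described by (type, name, signature) and
# tied to a module through moduleId/moduleName; moduleId is remapped on
# copy against the 'module' id space.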
class DBPort(object):
vtType = 'port'
def __init__(self, id=None, type=None, moduleId=None, moduleName=None, name=None, signature=None):
self._db_id = id
self._db_type = type
self._db_moduleId = moduleId
self._db_moduleName = moduleName
self._db_name = name
self._db_signature = signature
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPort.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPort(id=self._db_id,
type=self._db_type,
moduleId=self._db_moduleId,
moduleName=self._db_moduleName,
name=self._db_name,
signature=self._db_signature)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_moduleId') and ('module', self._db_moduleId) in id_remap:
cp._db_moduleId = id_remap[('module', self._db_moduleId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPort()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'type' in class_dict:
res = class_dict['type'](old_obj, trans_dict)
new_obj.db_type = res
elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
new_obj.db_type = old_obj.db_type
if 'moduleId' in class_dict:
res = class_dict['moduleId'](old_obj, trans_dict)
new_obj.db_moduleId = res
elif hasattr(old_obj, 'db_moduleId') and old_obj.db_moduleId is not None:
new_obj.db_moduleId = old_obj.db_moduleId
if 'moduleName' in class_dict:
res = class_dict['moduleName'](old_obj, trans_dict)
new_obj.db_moduleName = res
elif hasattr(old_obj, 'db_moduleName') and old_obj.db_moduleName is not None:
new_obj.db_moduleName = old_obj.db_moduleName
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'signature' in class_dict:
res = class_dict['signature'](old_obj, trans_dict)
new_obj.db_signature = res
elif hasattr(old_obj, 'db_signature') and old_obj.db_signature is not None:
new_obj.db_signature = old_obj.db_signature
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_type(self):
return self._db_type
def __set_db_type(self, type):
self._db_type = type
self.is_dirty = True
db_type = property(__get_db_type, __set_db_type)
def db_add_type(self, type):
self._db_type = type
def db_change_type(self, type):
self._db_type = type
def db_delete_type(self, type):
self._db_type = None
def __get_db_moduleId(self):
return self._db_moduleId
def __set_db_moduleId(self, moduleId):
self._db_moduleId = moduleId
self.is_dirty = True
db_moduleId = property(__get_db_moduleId, __set_db_moduleId)
def db_add_moduleId(self, moduleId):
self._db_moduleId = moduleId
def db_change_moduleId(self, moduleId):
self._db_moduleId = moduleId
def db_delete_moduleId(self, moduleId):
self._db_moduleId = None
def __get_db_moduleName(self):
return self._db_moduleName
def __set_db_moduleName(self, moduleName):
self._db_moduleName = moduleName
self.is_dirty = True
db_moduleName = property(__get_db_moduleName, __set_db_moduleName)
def db_add_moduleName(self, moduleName):
self._db_moduleName = moduleName
def db_change_moduleName(self, moduleName):
self._db_moduleName = moduleName
def db_delete_moduleName(self, moduleName):
self._db_moduleName = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_signature(self):
return self._db_signature
def __set_db_signature(self, signature):
self._db_signature = signature
self.is_dirty = True
db_signature = property(__get_db_signature, __set_db_signature)
def db_add_signature(self, signature):
self._db_signature = signature
def db_change_signature(self, signature):
self._db_signature = signature
def db_delete_signature(self, signature):
self._db_signature = None
def getPrimaryKey(self):
return self._db_id
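# DBOpmArtifact: an OPM artifact with an id, a single value child, and a
# non-keyed list of account references.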
class DBOpmArtifact(object):
vtType = 'opm_artifact'
def __init__(self, id=None, value=None, accounts=None):
self._db_id = id
self.db_deleted_value = []
self._db_value = value
self.db_deleted_accounts = []
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmArtifact.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmArtifact(id=self._db_id)
if self._db_value is not None:
cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmArtifact()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
obj = old_obj.db_value
new_obj.db_add_value(DBOpmArtifactValue.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
for obj in old_obj.db_deleted_value:
n_obj = DBOpmArtifactValue.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_value is not None:
children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_value = None
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_value)
children.extend(self.db_deleted_accounts)
if remove:
self.db_deleted_value = []
self.db_deleted_accounts = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_value is not None and self._db_value.has_changes():
return True
for child in self._db_accounts:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
if not self.is_new:
self.db_deleted_value.append(self._db_value)
self._db_value = None
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
    def db_change_account(self, account):
        # non-keyed children cannot be matched, so "change" just appends
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        # remove by identity so db_children(orphan=True) does not raise
        self.is_dirty = True
        for i in xrange(len(self._db_accounts)):
            if self._db_accounts[i] is account:
                if not self._db_accounts[i].is_new:
                    self.db_deleted_accounts.append(self._db_accounts[i])
                del self._db_accounts[i]
                break
    def db_get_account(self, key):
        # non-keyed children cannot be looked up by key
        return None
def getPrimaryKey(self):
return self._db_id
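# DBGroup: a group module wrapping an embedded workflow alongside the
# usual module fields (name/package/namespace/version/tag), a location,
# and keyed collections of functions and annotations.  The embedded
# workflow is not traversed by db_children.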
class DBGroup(object):
vtType = 'group'
def __init__(self, id=None, workflow=None, cache=None, name=None, namespace=None, package=None, version=None, tag=None, location=None, functions=None, annotations=None):
self._db_id = id
self.db_deleted_workflow = []
self._db_workflow = workflow
self._db_cache = cache
self._db_name = name
self._db_namespace = namespace
self._db_package = package
self._db_version = version
self._db_tag = tag
self.db_deleted_location = []
self._db_location = location
self.db_deleted_functions = []
self.db_functions_id_index = {}
if functions is None:
self._db_functions = []
else:
self._db_functions = functions
for v in self._db_functions:
self.db_functions_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBGroup.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBGroup(id=self._db_id,
cache=self._db_cache,
name=self._db_name,
namespace=self._db_namespace,
package=self._db_package,
version=self._db_version,
tag=self._db_tag)
if self._db_workflow is not None:
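            # note: new_ids/id_scope/id_remap are not applied to the
            # embedded workflow, so its internal ids are preserved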
cp._db_workflow = self._db_workflow.do_copy()
if self._db_location is not None:
cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
if self._db_functions is None:
cp._db_functions = []
else:
cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBGroup()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'workflow' in class_dict:
res = class_dict['workflow'](old_obj, trans_dict)
new_obj.db_workflow = res
elif hasattr(old_obj, 'db_workflow') and old_obj.db_workflow is not None:
obj = old_obj.db_workflow
new_obj.db_add_workflow(DBWorkflow.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_workflow') and hasattr(new_obj, 'db_deleted_workflow'):
for obj in old_obj.db_deleted_workflow:
n_obj = DBWorkflow.update_version(obj, trans_dict)
new_obj.db_deleted_workflow.append(n_obj)
if 'cache' in class_dict:
res = class_dict['cache'](old_obj, trans_dict)
new_obj.db_cache = res
elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
new_obj.db_cache = old_obj.db_cache
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'namespace' in class_dict:
res = class_dict['namespace'](old_obj, trans_dict)
new_obj.db_namespace = res
elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
new_obj.db_namespace = old_obj.db_namespace
if 'package' in class_dict:
res = class_dict['package'](old_obj, trans_dict)
new_obj.db_package = res
elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
new_obj.db_package = old_obj.db_package
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'tag' in class_dict:
res = class_dict['tag'](old_obj, trans_dict)
new_obj.db_tag = res
elif hasattr(old_obj, 'db_tag') and old_obj.db_tag is not None:
new_obj.db_tag = old_obj.db_tag
if 'location' in class_dict:
res = class_dict['location'](old_obj, trans_dict)
new_obj.db_location = res
elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
obj = old_obj.db_location
new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
for obj in old_obj.db_deleted_location:
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_location.append(n_obj)
if 'functions' in class_dict:
res = class_dict['functions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_function(obj)
elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
for obj in old_obj.db_functions:
new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
for obj in old_obj.db_deleted_functions:
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_functions.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_location is not None:
children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_location = None
to_del = []
for child in self.db_functions:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_function(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_workflow)
children.extend(self.db_deleted_location)
children.extend(self.db_deleted_functions)
children.extend(self.db_deleted_annotations)
if remove:
self.db_deleted_workflow = []
self.db_deleted_location = []
self.db_deleted_functions = []
self.db_deleted_annotations = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_workflow is not None and self._db_workflow.has_changes():
return True
if self._db_location is not None and self._db_location.has_changes():
return True
for child in self._db_functions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_workflow(self):
return self._db_workflow
def __set_db_workflow(self, workflow):
self._db_workflow = workflow
self.is_dirty = True
db_workflow = property(__get_db_workflow, __set_db_workflow)
def db_add_workflow(self, workflow):
self._db_workflow = workflow
def db_change_workflow(self, workflow):
self._db_workflow = workflow
def db_delete_workflow(self, workflow):
if not self.is_new:
self.db_deleted_workflow.append(self._db_workflow)
self._db_workflow = None
def __get_db_cache(self):
return self._db_cache
def __set_db_cache(self, cache):
self._db_cache = cache
self.is_dirty = True
db_cache = property(__get_db_cache, __set_db_cache)
def db_add_cache(self, cache):
self._db_cache = cache
def db_change_cache(self, cache):
self._db_cache = cache
def db_delete_cache(self, cache):
self._db_cache = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_namespace(self):
return self._db_namespace
def __set_db_namespace(self, namespace):
self._db_namespace = namespace
self.is_dirty = True
db_namespace = property(__get_db_namespace, __set_db_namespace)
def db_add_namespace(self, namespace):
self._db_namespace = namespace
def db_change_namespace(self, namespace):
self._db_namespace = namespace
def db_delete_namespace(self, namespace):
self._db_namespace = None
def __get_db_package(self):
return self._db_package
def __set_db_package(self, package):
self._db_package = package
self.is_dirty = True
db_package = property(__get_db_package, __set_db_package)
def db_add_package(self, package):
self._db_package = package
def db_change_package(self, package):
self._db_package = package
def db_delete_package(self, package):
self._db_package = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_tag(self):
return self._db_tag
def __set_db_tag(self, tag):
self._db_tag = tag
self.is_dirty = True
db_tag = property(__get_db_tag, __set_db_tag)
def db_add_tag(self, tag):
self._db_tag = tag
def db_change_tag(self, tag):
self._db_tag = tag
def db_delete_tag(self, tag):
self._db_tag = None
def __get_db_location(self):
return self._db_location
def __set_db_location(self, location):
self._db_location = location
self.is_dirty = True
db_location = property(__get_db_location, __set_db_location)
def db_add_location(self, location):
self._db_location = location
def db_change_location(self, location):
self._db_location = location
def db_delete_location(self, location):
if not self.is_new:
self.db_deleted_location.append(self._db_location)
self._db_location = None
def __get_db_functions(self):
return self._db_functions
def __set_db_functions(self, functions):
self._db_functions = functions
self.is_dirty = True
db_functions = property(__get_db_functions, __set_db_functions)
def db_get_functions(self):
return self._db_functions
def db_add_function(self, function):
self.is_dirty = True
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_change_function(self, function):
self.is_dirty = True
found = False
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
self._db_functions[i] = function
found = True
break
if not found:
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_delete_function(self, function):
self.is_dirty = True
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
if not self._db_functions[i].is_new:
self.db_deleted_functions.append(self._db_functions[i])
del self._db_functions[i]
break
del self.db_functions_id_index[function.db_id]
def db_get_function(self, key):
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == key:
return self._db_functions[i]
return None
def db_get_function_by_id(self, key):
return self.db_functions_id_index[key]
def db_has_function_with_id(self, key):
return key in self.db_functions_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def getPrimaryKey(self):
return self._db_id
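# DBLog: an execution log holding keyed collections of workflow_execs and
# machines plus a vistrail_id back-reference, which do_copy remaps against
# the 'vistrail' id space.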
class DBLog(object):
vtType = 'log'
def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, workflow_execs=None, machines=None, vistrail_id=None):
self._db_id = id
self._db_entity_type = entity_type
self._db_version = version
self._db_name = name
self._db_last_modified = last_modified
self.db_deleted_workflow_execs = []
self.db_workflow_execs_id_index = {}
if workflow_execs is None:
self._db_workflow_execs = []
else:
self._db_workflow_execs = workflow_execs
for v in self._db_workflow_execs:
self.db_workflow_execs_id_index[v.db_id] = v
self.db_deleted_machines = []
self.db_machines_id_index = {}
if machines is None:
self._db_machines = []
else:
self._db_machines = machines
for v in self._db_machines:
self.db_machines_id_index[v.db_id] = v
self._db_vistrail_id = vistrail_id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBLog.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBLog(id=self._db_id,
entity_type=self._db_entity_type,
version=self._db_version,
name=self._db_name,
last_modified=self._db_last_modified,
vistrail_id=self._db_vistrail_id)
if self._db_workflow_execs is None:
cp._db_workflow_execs = []
else:
cp._db_workflow_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_workflow_execs]
if self._db_machines is None:
cp._db_machines = []
else:
cp._db_machines = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_machines]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]
# recreate indices and set flags
cp.db_workflow_execs_id_index = dict((v.db_id, v) for v in cp._db_workflow_execs)
cp.db_machines_id_index = dict((v.db_id, v) for v in cp._db_machines)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBLog()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'entity_type' in class_dict:
res = class_dict['entity_type'](old_obj, trans_dict)
new_obj.db_entity_type = res
elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
new_obj.db_entity_type = old_obj.db_entity_type
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'workflow_execs' in class_dict:
res = class_dict['workflow_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_workflow_exec(obj)
elif hasattr(old_obj, 'db_workflow_execs') and old_obj.db_workflow_execs is not None:
for obj in old_obj.db_workflow_execs:
new_obj.db_add_workflow_exec(DBWorkflowExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_workflow_execs') and hasattr(new_obj, 'db_deleted_workflow_execs'):
for obj in old_obj.db_deleted_workflow_execs:
n_obj = DBWorkflowExec.update_version(obj, trans_dict)
new_obj.db_deleted_workflow_execs.append(n_obj)
if 'machines' in class_dict:
res = class_dict['machines'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_machine(obj)
elif hasattr(old_obj, 'db_machines') and old_obj.db_machines is not None:
for obj in old_obj.db_machines:
new_obj.db_add_machine(DBMachine.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_machines') and hasattr(new_obj, 'db_deleted_machines'):
for obj in old_obj.db_deleted_machines:
n_obj = DBMachine.update_version(obj, trans_dict)
new_obj.db_deleted_machines.append(n_obj)
if 'vistrail_id' in class_dict:
res = class_dict['vistrail_id'](old_obj, trans_dict)
new_obj.db_vistrail_id = res
elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
new_obj.db_vistrail_id = old_obj.db_vistrail_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_workflow_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_workflow_exec(child)
to_del = []
for child in self.db_machines:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_machine(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_workflow_execs)
children.extend(self.db_deleted_machines)
if remove:
self.db_deleted_workflow_execs = []
self.db_deleted_machines = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_workflow_execs:
if child.has_changes():
return True
for child in self._db_machines:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_entity_type(self):
return self._db_entity_type
def __set_db_entity_type(self, entity_type):
self._db_entity_type = entity_type
self.is_dirty = True
db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
def db_add_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_change_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_delete_entity_type(self, entity_type):
self._db_entity_type = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_workflow_execs(self):
return self._db_workflow_execs
def __set_db_workflow_execs(self, workflow_execs):
self._db_workflow_execs = workflow_execs
self.is_dirty = True
db_workflow_execs = property(__get_db_workflow_execs, __set_db_workflow_execs)
def db_get_workflow_execs(self):
return self._db_workflow_execs
def db_add_workflow_exec(self, workflow_exec):
self.is_dirty = True
self._db_workflow_execs.append(workflow_exec)
self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec
def db_change_workflow_exec(self, workflow_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_workflow_execs)):
if self._db_workflow_execs[i].db_id == workflow_exec.db_id:
self._db_workflow_execs[i] = workflow_exec
found = True
break
if not found:
self._db_workflow_execs.append(workflow_exec)
self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec
def db_delete_workflow_exec(self, workflow_exec):
self.is_dirty = True
for i in xrange(len(self._db_workflow_execs)):
if self._db_workflow_execs[i].db_id == workflow_exec.db_id:
if not self._db_workflow_execs[i].is_new:
self.db_deleted_workflow_execs.append(self._db_workflow_execs[i])
del self._db_workflow_execs[i]
break
del self.db_workflow_execs_id_index[workflow_exec.db_id]
def db_get_workflow_exec(self, key):
for i in xrange(len(self._db_workflow_execs)):
if self._db_workflow_execs[i].db_id == key:
return self._db_workflow_execs[i]
return None
def db_get_workflow_exec_by_id(self, key):
return self.db_workflow_execs_id_index[key]
def db_has_workflow_exec_with_id(self, key):
return key in self.db_workflow_execs_id_index
def __get_db_machines(self):
return self._db_machines
def __set_db_machines(self, machines):
self._db_machines = machines
self.is_dirty = True
db_machines = property(__get_db_machines, __set_db_machines)
def db_get_machines(self):
return self._db_machines
def db_add_machine(self, machine):
self.is_dirty = True
self._db_machines.append(machine)
self.db_machines_id_index[machine.db_id] = machine
def db_change_machine(self, machine):
self.is_dirty = True
found = False
for i in xrange(len(self._db_machines)):
if self._db_machines[i].db_id == machine.db_id:
self._db_machines[i] = machine
found = True
break
if not found:
self._db_machines.append(machine)
self.db_machines_id_index[machine.db_id] = machine
def db_delete_machine(self, machine):
self.is_dirty = True
for i in xrange(len(self._db_machines)):
if self._db_machines[i].db_id == machine.db_id:
if not self._db_machines[i].is_new:
self.db_deleted_machines.append(self._db_machines[i])
del self._db_machines[i]
break
del self.db_machines_id_index[machine.db_id]
def db_get_machine(self, key):
for i in xrange(len(self._db_machines)):
if self._db_machines[i].db_id == key:
return self._db_machines[i]
return None
def db_get_machine_by_id(self, key):
return self.db_machines_id_index[key]
def db_has_machine_with_id(self, key):
return key in self.db_machines_id_index
def __get_db_vistrail_id(self):
return self._db_vistrail_id
def __set_db_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
self.is_dirty = True
db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
def db_add_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
def db_change_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
def db_delete_vistrail_id(self, vistrail_id):
self._db_vistrail_id = None
def getPrimaryKey(self):
return self._db_id
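# DBOpmAgents: the OPM agent collection, indexed by agent id.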
class DBOpmAgents(object):
vtType = 'opm_agents'
def __init__(self, agents=None):
self.db_deleted_agents = []
self.db_agents_id_index = {}
if agents is None:
self._db_agents = []
else:
self._db_agents = agents
for v in self._db_agents:
self.db_agents_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmAgents.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmAgents()
if self._db_agents is None:
cp._db_agents = []
else:
cp._db_agents = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_agents]
# set new ids
        # DBOpmAgents defines no id field; guard the remap so that
        # requesting new ids does not raise AttributeError on self.db_id.
        if new_ids and hasattr(self, 'db_id'):
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
# recreate indices and set flags
cp.db_agents_id_index = dict((v.db_id, v) for v in cp._db_agents)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmAgents()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'agents' in class_dict:
res = class_dict['agents'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_agent(obj)
elif hasattr(old_obj, 'db_agents') and old_obj.db_agents is not None:
for obj in old_obj.db_agents:
new_obj.db_add_agent(DBOpmAgent.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_agents') and hasattr(new_obj, 'db_deleted_agents'):
for obj in old_obj.db_deleted_agents:
n_obj = DBOpmAgent.update_version(obj, trans_dict)
new_obj.db_deleted_agents.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_agents:
            # DBOpmAgents has no id field of its own; use None as the
            # parent id rather than raising AttributeError on self.db_id
            children.extend(child.db_children((self.vtType, getattr(self, 'db_id', None)), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_agent(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_agents)
if remove:
self.db_deleted_agents = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_agents:
if child.has_changes():
return True
return False
def __get_db_agents(self):
return self._db_agents
def __set_db_agents(self, agents):
self._db_agents = agents
self.is_dirty = True
db_agents = property(__get_db_agents, __set_db_agents)
def db_get_agents(self):
return self._db_agents
def db_add_agent(self, agent):
self.is_dirty = True
self._db_agents.append(agent)
self.db_agents_id_index[agent.db_id] = agent
def db_change_agent(self, agent):
self.is_dirty = True
found = False
for i in xrange(len(self._db_agents)):
if self._db_agents[i].db_id == agent.db_id:
self._db_agents[i] = agent
found = True
break
if not found:
self._db_agents.append(agent)
self.db_agents_id_index[agent.db_id] = agent
def db_delete_agent(self, agent):
self.is_dirty = True
for i in xrange(len(self._db_agents)):
if self._db_agents[i].db_id == agent.db_id:
if not self._db_agents[i].is_new:
self.db_deleted_agents.append(self._db_agents[i])
del self._db_agents[i]
break
del self.db_agents_id_index[agent.db_id]
def db_get_agent(self, key):
for i in xrange(len(self._db_agents)):
if self._db_agents[i].db_id == key:
return self._db_agents[i]
return None
def db_get_agent_by_id(self, key):
return self.db_agents_id_index[key]
def db_has_agent_with_id(self, key):
return key in self.db_agents_id_index
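# DBOpmProcessIdCause: the cause side of an OPM dependency; wraps an
# opm_process id, remapped against the 'opm_process' id space on copy.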
class DBOpmProcessIdCause(object):
vtType = 'opm_process_id_cause'
def __init__(self, id=None):
self._db_id = id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmProcessIdCause.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmProcessIdCause(id=self._db_id)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('opm_process', self._db_id) in id_remap:
cp._db_id = id_remap[('opm_process', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmProcessIdCause()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
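# DBMachine: a provenance record of the executing machine
# (name/os/architecture/processor/ram).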
class DBMachine(object):
vtType = 'machine'
def __init__(self, id=None, name=None, os=None, architecture=None, processor=None, ram=None):
self._db_id = id
self._db_name = name
self._db_os = os
self._db_architecture = architecture
self._db_processor = processor
self._db_ram = ram
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBMachine.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBMachine(id=self._db_id,
name=self._db_name,
os=self._db_os,
architecture=self._db_architecture,
processor=self._db_processor,
ram=self._db_ram)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
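        # vestigial generator output: DBMachine defines no db_vistrailId,
        # so the hasattr guard below always fails and this remap is dead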
if hasattr(self, 'db_vistrailId') and ('vistrail', self._db_vistrailId) in id_remap:
cp._db_vistrailId = id_remap[('vistrail', self._db_vistrailId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBMachine()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'os' in class_dict:
res = class_dict['os'](old_obj, trans_dict)
new_obj.db_os = res
elif hasattr(old_obj, 'db_os') and old_obj.db_os is not None:
new_obj.db_os = old_obj.db_os
if 'architecture' in class_dict:
res = class_dict['architecture'](old_obj, trans_dict)
new_obj.db_architecture = res
elif hasattr(old_obj, 'db_architecture') and old_obj.db_architecture is not None:
new_obj.db_architecture = old_obj.db_architecture
if 'processor' in class_dict:
res = class_dict['processor'](old_obj, trans_dict)
new_obj.db_processor = res
elif hasattr(old_obj, 'db_processor') and old_obj.db_processor is not None:
new_obj.db_processor = old_obj.db_processor
if 'ram' in class_dict:
res = class_dict['ram'](old_obj, trans_dict)
new_obj.db_ram = res
elif hasattr(old_obj, 'db_ram') and old_obj.db_ram is not None:
new_obj.db_ram = old_obj.db_ram
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_os(self):
return self._db_os
def __set_db_os(self, os):
self._db_os = os
self.is_dirty = True
db_os = property(__get_db_os, __set_db_os)
def db_add_os(self, os):
self._db_os = os
def db_change_os(self, os):
self._db_os = os
def db_delete_os(self, os):
self._db_os = None
def __get_db_architecture(self):
return self._db_architecture
def __set_db_architecture(self, architecture):
self._db_architecture = architecture
self.is_dirty = True
db_architecture = property(__get_db_architecture, __set_db_architecture)
def db_add_architecture(self, architecture):
self._db_architecture = architecture
def db_change_architecture(self, architecture):
self._db_architecture = architecture
def db_delete_architecture(self, architecture):
self._db_architecture = None
def __get_db_processor(self):
return self._db_processor
def __set_db_processor(self, processor):
self._db_processor = processor
self.is_dirty = True
db_processor = property(__get_db_processor, __set_db_processor)
def db_add_processor(self, processor):
self._db_processor = processor
def db_change_processor(self, processor):
self._db_processor = processor
def db_delete_processor(self, processor):
self._db_processor = None
def __get_db_ram(self):
return self._db_ram
def __set_db_ram(self, ram):
self._db_ram = ram
self.is_dirty = True
db_ram = property(__get_db_ram, __set_db_ram)
def db_add_ram(self, ram):
self._db_ram = ram
def db_change_ram(self, ram):
self._db_ram = ram
def db_delete_ram(self, ram):
self._db_ram = None
def getPrimaryKey(self):
return self._db_id
class DBAdd(object):
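    """Change-action that adds one child object to a parent (vtType 'add').

    db_data holds the added child (any supported vtType: module, location,
    annotation, function, connection, port, parameter, portSpec,
    abstraction, group, other, plugin_data); what/objectId name the added
    object and parentObjId/parentObjType name its parent, and both
    references are remapped through id_remap in do_copy(new_ids=True).
    """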
vtType = 'add'
def __init__(self, data=None, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None):
self.db_deleted_data = []
self._db_data = data
self._db_id = id
self._db_what = what
self._db_objectId = objectId
self._db_parentObjId = parentObjId
self._db_parentObjType = parentObjType
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAdd.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAdd(id=self._db_id,
what=self._db_what,
objectId=self._db_objectId,
parentObjId=self._db_parentObjId,
parentObjType=self._db_parentObjType)
if self._db_data is not None:
cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap:
cp._db_objectId = id_remap[(self._db_what, self._db_objectId)]
if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
        # set flags (no indices to recreate for this class)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAdd()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'data' in class_dict:
res = class_dict['data'](old_obj, trans_dict)
new_obj.db_data = res
elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
obj = old_obj.db_data
if obj.vtType == 'module':
new_obj.db_add_data(DBModule.update_version(obj, trans_dict))
elif obj.vtType == 'location':
new_obj.db_add_data(DBLocation.update_version(obj, trans_dict))
elif obj.vtType == 'annotation':
new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict))
elif obj.vtType == 'function':
new_obj.db_add_data(DBFunction.update_version(obj, trans_dict))
elif obj.vtType == 'connection':
new_obj.db_add_data(DBConnection.update_version(obj, trans_dict))
elif obj.vtType == 'port':
new_obj.db_add_data(DBPort.update_version(obj, trans_dict))
elif obj.vtType == 'parameter':
new_obj.db_add_data(DBParameter.update_version(obj, trans_dict))
elif obj.vtType == 'portSpec':
new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict))
elif obj.vtType == 'abstraction':
new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict))
elif obj.vtType == 'group':
new_obj.db_add_data(DBGroup.update_version(obj, trans_dict))
elif obj.vtType == 'other':
new_obj.db_add_data(DBOther.update_version(obj, trans_dict))
elif obj.vtType == 'plugin_data':
new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'):
for obj in old_obj.db_deleted_data:
if obj.vtType == 'module':
n_obj = DBModule.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'location':
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'annotation':
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'function':
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'connection':
n_obj = DBConnection.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'port':
n_obj = DBPort.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'parameter':
n_obj = DBParameter.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'portSpec':
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'abstraction':
n_obj = DBAbstraction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'group':
n_obj = DBGroup.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'other':
n_obj = DBOther.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'plugin_data':
n_obj = DBPluginData.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'what' in class_dict:
res = class_dict['what'](old_obj, trans_dict)
new_obj.db_what = res
elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
new_obj.db_what = old_obj.db_what
if 'objectId' in class_dict:
res = class_dict['objectId'](old_obj, trans_dict)
new_obj.db_objectId = res
elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None:
new_obj.db_objectId = old_obj.db_objectId
if 'parentObjId' in class_dict:
res = class_dict['parentObjId'](old_obj, trans_dict)
new_obj.db_parentObjId = res
elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
new_obj.db_parentObjId = old_obj.db_parentObjId
if 'parentObjType' in class_dict:
res = class_dict['parentObjType'](old_obj, trans_dict)
new_obj.db_parentObjType = res
elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
new_obj.db_parentObjType = old_obj.db_parentObjType
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_data is not None:
children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_data = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_data)
if remove:
self.db_deleted_data = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_data is not None and self._db_data.has_changes():
return True
return False
def __get_db_data(self):
return self._db_data
def __set_db_data(self, data):
self._db_data = data
self.is_dirty = True
db_data = property(__get_db_data, __set_db_data)
def db_add_data(self, data):
self._db_data = data
def db_change_data(self, data):
self._db_data = data
def db_delete_data(self, data):
if not self.is_new:
self.db_deleted_data.append(self._db_data)
self._db_data = None
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_what(self):
return self._db_what
def __set_db_what(self, what):
self._db_what = what
self.is_dirty = True
db_what = property(__get_db_what, __set_db_what)
def db_add_what(self, what):
self._db_what = what
def db_change_what(self, what):
self._db_what = what
def db_delete_what(self, what):
self._db_what = None
def __get_db_objectId(self):
return self._db_objectId
def __set_db_objectId(self, objectId):
self._db_objectId = objectId
self.is_dirty = True
db_objectId = property(__get_db_objectId, __set_db_objectId)
def db_add_objectId(self, objectId):
self._db_objectId = objectId
def db_change_objectId(self, objectId):
self._db_objectId = objectId
def db_delete_objectId(self, objectId):
self._db_objectId = None
def __get_db_parentObjId(self):
return self._db_parentObjId
def __set_db_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
self.is_dirty = True
db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
def db_add_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_change_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_delete_parentObjId(self, parentObjId):
self._db_parentObjId = None
def __get_db_parentObjType(self):
return self._db_parentObjType
def __set_db_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
self.is_dirty = True
db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
def db_add_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_change_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_delete_parentObjType(self, parentObjType):
self._db_parentObjType = None
def getPrimaryKey(self):
return self._db_id
class DBOther(object):
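    """Generic key/value leaf object (vtType 'other')."""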
vtType = 'other'
def __init__(self, id=None, key=None, value=None):
self._db_id = id
self._db_key = key
self._db_value = value
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOther.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOther(id=self._db_id,
key=self._db_key,
value=self._db_value)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
        # set flags (no indices to recreate for this class)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOther()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'key' in class_dict:
res = class_dict['key'](old_obj, trans_dict)
new_obj.db_key = res
elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None:
new_obj.db_key = old_obj.db_key
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
new_obj.db_value = old_obj.db_value
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_key(self):
return self._db_key
def __set_db_key(self, key):
self._db_key = key
self.is_dirty = True
db_key = property(__get_db_key, __set_db_key)
def db_add_key(self, key):
self._db_key = key
def db_change_key(self, key):
self._db_key = key
def db_delete_key(self, key):
self._db_key = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
self._db_value = None
def getPrimaryKey(self):
return self._db_id
class DBLocation(object):
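    """Leaf object holding an id and an (x, y) coordinate pair (vtType 'location')."""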
vtType = 'location'
def __init__(self, id=None, x=None, y=None):
self._db_id = id
self._db_x = x
self._db_y = y
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBLocation.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBLocation(id=self._db_id,
x=self._db_x,
y=self._db_y)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
        # set flags (no indices to recreate for this class)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBLocation()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'x' in class_dict:
res = class_dict['x'](old_obj, trans_dict)
new_obj.db_x = res
elif hasattr(old_obj, 'db_x') and old_obj.db_x is not None:
new_obj.db_x = old_obj.db_x
if 'y' in class_dict:
res = class_dict['y'](old_obj, trans_dict)
new_obj.db_y = res
elif hasattr(old_obj, 'db_y') and old_obj.db_y is not None:
new_obj.db_y = old_obj.db_y
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_x(self):
return self._db_x
def __set_db_x(self, x):
self._db_x = x
self.is_dirty = True
db_x = property(__get_db_x, __set_db_x)
def db_add_x(self, x):
self._db_x = x
def db_change_x(self, x):
self._db_x = x
def db_delete_x(self, x):
self._db_x = None
def __get_db_y(self):
return self._db_y
def __set_db_y(self, y):
self._db_y = y
self.is_dirty = True
db_y = property(__get_db_y, __set_db_y)
def db_add_y(self, y):
self._db_y = y
def db_change_y(self, y):
self._db_y = y
def db_delete_y(self, y):
self._db_y = None
def getPrimaryKey(self):
return self._db_id
class DBOpmOverlaps(object):
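    """Unkeyed container of OPM account ids (vtType 'opm_overlaps').

    db_add_ and db_change_opm_account_id both append, db_get_opm_account_id
    always returns None, and db_delete_opm_account_id always raises because
    the list is non-keyed.  Note that the new_ids branch of do_copy() reads
    self.db_id, which this class never sets.
    """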
vtType = 'opm_overlaps'
def __init__(self, opm_account_ids=None):
self.db_deleted_opm_account_ids = []
if opm_account_ids is None:
self._db_opm_account_ids = []
else:
self._db_opm_account_ids = opm_account_ids
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmOverlaps.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmOverlaps()
if self._db_opm_account_ids is None:
cp._db_opm_account_ids = []
else:
cp._db_opm_account_ids = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_account_ids]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
        # set flags (no indices to recreate for this class)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmOverlaps()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'opm_account_ids' in class_dict:
res = class_dict['opm_account_ids'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_opm_account_id(obj)
elif hasattr(old_obj, 'db_opm_account_ids') and old_obj.db_opm_account_ids is not None:
for obj in old_obj.db_opm_account_ids:
new_obj.db_add_opm_account_id(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_opm_account_ids') and hasattr(new_obj, 'db_deleted_opm_account_ids'):
for obj in old_obj.db_deleted_opm_account_ids:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_opm_account_ids.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_opm_account_ids:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_opm_account_id(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_opm_account_ids)
if remove:
self.db_deleted_opm_account_ids = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_opm_account_ids:
if child.has_changes():
return True
return False
def __get_db_opm_account_ids(self):
return self._db_opm_account_ids
def __set_db_opm_account_ids(self, opm_account_ids):
self._db_opm_account_ids = opm_account_ids
self.is_dirty = True
db_opm_account_ids = property(__get_db_opm_account_ids, __set_db_opm_account_ids)
def db_get_opm_account_ids(self):
return self._db_opm_account_ids
def db_add_opm_account_id(self, opm_account_id):
self.is_dirty = True
self._db_opm_account_ids.append(opm_account_id)
def db_change_opm_account_id(self, opm_account_id):
self.is_dirty = True
self._db_opm_account_ids.append(opm_account_id)
def db_delete_opm_account_id(self, opm_account_id):
        # unconditionally fails: the account-id list is non-keyed
        raise Exception('Cannot delete a non-keyed object')
def db_get_opm_account_id(self, key):
return None
class DBOpmArtifacts(object):
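    """Collection of OPM artifacts keyed by id (vtType 'opm_artifacts').

    db_artifacts_id_index mirrors the artifact list for O(1) id lookups and
    is rebuilt by do_copy(); non-new artifacts removed via
    db_delete_artifact are retained on db_deleted_artifacts until
    db_deleted_children(remove=True) drains them.  As with the other OPM
    containers, the new_ids branch of do_copy() reads self.db_id, which
    this class never sets.
    """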
vtType = 'opm_artifacts'
def __init__(self, artifacts=None):
self.db_deleted_artifacts = []
self.db_artifacts_id_index = {}
if artifacts is None:
self._db_artifacts = []
else:
self._db_artifacts = artifacts
for v in self._db_artifacts:
self.db_artifacts_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmArtifacts.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmArtifacts()
if self._db_artifacts is None:
cp._db_artifacts = []
else:
cp._db_artifacts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_artifacts]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_artifacts_id_index = dict((v.db_id, v) for v in cp._db_artifacts)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmArtifacts()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'artifacts' in class_dict:
res = class_dict['artifacts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_artifact(obj)
elif hasattr(old_obj, 'db_artifacts') and old_obj.db_artifacts is not None:
for obj in old_obj.db_artifacts:
new_obj.db_add_artifact(DBOpmArtifact.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_artifacts') and hasattr(new_obj, 'db_deleted_artifacts'):
for obj in old_obj.db_deleted_artifacts:
n_obj = DBOpmArtifact.update_version(obj, trans_dict)
new_obj.db_deleted_artifacts.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_artifacts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_artifact(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_artifacts)
if remove:
self.db_deleted_artifacts = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_artifacts:
if child.has_changes():
return True
return False
def __get_db_artifacts(self):
return self._db_artifacts
def __set_db_artifacts(self, artifacts):
self._db_artifacts = artifacts
self.is_dirty = True
db_artifacts = property(__get_db_artifacts, __set_db_artifacts)
def db_get_artifacts(self):
return self._db_artifacts
def db_add_artifact(self, artifact):
self.is_dirty = True
self._db_artifacts.append(artifact)
self.db_artifacts_id_index[artifact.db_id] = artifact
def db_change_artifact(self, artifact):
self.is_dirty = True
found = False
for i in xrange(len(self._db_artifacts)):
if self._db_artifacts[i].db_id == artifact.db_id:
self._db_artifacts[i] = artifact
found = True
break
if not found:
self._db_artifacts.append(artifact)
self.db_artifacts_id_index[artifact.db_id] = artifact
def db_delete_artifact(self, artifact):
self.is_dirty = True
for i in xrange(len(self._db_artifacts)):
if self._db_artifacts[i].db_id == artifact.db_id:
if not self._db_artifacts[i].is_new:
self.db_deleted_artifacts.append(self._db_artifacts[i])
del self._db_artifacts[i]
break
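        # note: this del raises KeyError if no artifact with that id was indexed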
del self.db_artifacts_id_index[artifact.db_id]
def db_get_artifact(self, key):
for i in xrange(len(self._db_artifacts)):
if self._db_artifacts[i].db_id == key:
return self._db_artifacts[i]
return None
def db_get_artifact_by_id(self, key):
return self.db_artifacts_id_index[key]
def db_has_artifact_with_id(self, key):
return key in self.db_artifacts_id_index
class DBOpmDependencies(object):
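    """Unkeyed container of OPM dependency edges (vtType 'opm_dependencies').

    db_dependencys may hold any of the edge types opm_used,
    opm_was_generated_by, opm_was_triggered_by, opm_was_derived_from and
    opm_was_controlled_by; deletion raises because the list is non-keyed.
    """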
vtType = 'opm_dependencies'
def __init__(self, dependencys=None):
self.db_deleted_dependencys = []
if dependencys is None:
self._db_dependencys = []
else:
self._db_dependencys = dependencys
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmDependencies.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmDependencies()
if self._db_dependencys is None:
cp._db_dependencys = []
else:
cp._db_dependencys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_dependencys]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
        # set flags (no indices to recreate for this class)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmDependencies()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'dependencys' in class_dict:
res = class_dict['dependencys'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_dependency(obj)
elif hasattr(old_obj, 'db_dependencys') and old_obj.db_dependencys is not None:
for obj in old_obj.db_dependencys:
if obj.vtType == 'opm_used':
new_obj.db_add_dependency(DBOpmUsed.update_version(obj, trans_dict))
elif obj.vtType == 'opm_was_generated_by':
new_obj.db_add_dependency(DBOpmWasGeneratedBy.update_version(obj, trans_dict))
elif obj.vtType == 'opm_was_triggered_by':
new_obj.db_add_dependency(DBOpmWasTriggeredBy.update_version(obj, trans_dict))
elif obj.vtType == 'opm_was_derived_from':
new_obj.db_add_dependency(DBOpmWasDerivedFrom.update_version(obj, trans_dict))
elif obj.vtType == 'opm_was_controlled_by':
new_obj.db_add_dependency(DBOpmWasControlledBy.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_dependencys') and hasattr(new_obj, 'db_deleted_dependencys'):
for obj in old_obj.db_deleted_dependencys:
if obj.vtType == 'opm_used':
n_obj = DBOpmUsed.update_version(obj, trans_dict)
new_obj.db_deleted_dependencys.append(n_obj)
elif obj.vtType == 'opm_was_generated_by':
n_obj = DBOpmWasGeneratedBy.update_version(obj, trans_dict)
new_obj.db_deleted_dependencys.append(n_obj)
elif obj.vtType == 'opm_was_triggered_by':
n_obj = DBOpmWasTriggeredBy.update_version(obj, trans_dict)
new_obj.db_deleted_dependencys.append(n_obj)
elif obj.vtType == 'opm_was_derived_from':
n_obj = DBOpmWasDerivedFrom.update_version(obj, trans_dict)
new_obj.db_deleted_dependencys.append(n_obj)
elif obj.vtType == 'opm_was_controlled_by':
n_obj = DBOpmWasControlledBy.update_version(obj, trans_dict)
new_obj.db_deleted_dependencys.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_dependencys:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_dependency(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_dependencys)
if remove:
self.db_deleted_dependencys = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_dependencys:
if child.has_changes():
return True
return False
def __get_db_dependencys(self):
return self._db_dependencys
def __set_db_dependencys(self, dependencys):
self._db_dependencys = dependencys
self.is_dirty = True
db_dependencys = property(__get_db_dependencys, __set_db_dependencys)
def db_get_dependencys(self):
return self._db_dependencys
def db_add_dependency(self, dependency):
self.is_dirty = True
self._db_dependencys.append(dependency)
def db_change_dependency(self, dependency):
self.is_dirty = True
self._db_dependencys.append(dependency)
def db_delete_dependency(self, dependency):
        # unconditionally fails: the dependency list is non-keyed
        raise Exception('Cannot delete a non-keyed object')
def db_get_dependency(self, key):
return None
class DBParameter(object):
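    """Leaf object for a single parameter value (vtType 'parameter').

    Scalar fields: id, pos, name, type, val and alias, each exposed as a
    dirtying db_* property; getPrimaryKey() returns the id.
    """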
vtType = 'parameter'
def __init__(self, id=None, pos=None, name=None, type=None, val=None, alias=None):
self._db_id = id
self._db_pos = pos
self._db_name = name
self._db_type = type
self._db_val = val
self._db_alias = alias
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBParameter.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBParameter(id=self._db_id,
pos=self._db_pos,
name=self._db_name,
type=self._db_type,
val=self._db_val,
alias=self._db_alias)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
        # set flags (no indices to recreate for this class)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBParameter()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'pos' in class_dict:
res = class_dict['pos'](old_obj, trans_dict)
new_obj.db_pos = res
elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None:
new_obj.db_pos = old_obj.db_pos
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'type' in class_dict:
res = class_dict['type'](old_obj, trans_dict)
new_obj.db_type = res
elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
new_obj.db_type = old_obj.db_type
if 'val' in class_dict:
res = class_dict['val'](old_obj, trans_dict)
new_obj.db_val = res
elif hasattr(old_obj, 'db_val') and old_obj.db_val is not None:
new_obj.db_val = old_obj.db_val
if 'alias' in class_dict:
res = class_dict['alias'](old_obj, trans_dict)
new_obj.db_alias = res
elif hasattr(old_obj, 'db_alias') and old_obj.db_alias is not None:
new_obj.db_alias = old_obj.db_alias
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_pos(self):
return self._db_pos
def __set_db_pos(self, pos):
self._db_pos = pos
self.is_dirty = True
db_pos = property(__get_db_pos, __set_db_pos)
def db_add_pos(self, pos):
self._db_pos = pos
def db_change_pos(self, pos):
self._db_pos = pos
def db_delete_pos(self, pos):
self._db_pos = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_type(self):
return self._db_type
def __set_db_type(self, type):
self._db_type = type
self.is_dirty = True
db_type = property(__get_db_type, __set_db_type)
def db_add_type(self, type):
self._db_type = type
def db_change_type(self, type):
self._db_type = type
def db_delete_type(self, type):
self._db_type = None
def __get_db_val(self):
return self._db_val
def __set_db_val(self, val):
self._db_val = val
self.is_dirty = True
db_val = property(__get_db_val, __set_db_val)
def db_add_val(self, val):
self._db_val = val
def db_change_val(self, val):
self._db_val = val
def db_delete_val(self, val):
self._db_val = None
def __get_db_alias(self):
return self._db_alias
def __set_db_alias(self, alias):
self._db_alias = alias
self.is_dirty = True
db_alias = property(__get_db_alias, __set_db_alias)
def db_add_alias(self, alias):
self._db_alias = alias
def db_change_alias(self, alias):
self._db_alias = alias
def db_delete_alias(self, alias):
self._db_alias = None
def getPrimaryKey(self):
return self._db_id
class DBOpmUsed(object):
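    """OPM 'used' edge linking a process to an artifact (vtType 'opm_used').

    effect (a process id), role and cause (an artifact id) are single
    children tracked through db_deleted_* lists; accounts and opm_times are
    unkeyed child lists, so their db_delete_* methods always raise.
    """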
vtType = 'opm_used'
def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
self.db_deleted_effect = []
self._db_effect = effect
self.db_deleted_role = []
self._db_role = role
self.db_deleted_cause = []
self._db_cause = cause
self.db_deleted_accounts = []
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
self.db_deleted_opm_times = []
if opm_times is None:
self._db_opm_times = []
else:
self._db_opm_times = opm_times
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmUsed.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmUsed()
if self._db_effect is not None:
cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
if self._db_role is not None:
cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
if self._db_cause is not None:
cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
if self._db_opm_times is None:
cp._db_opm_times = []
else:
cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
        # set flags (no indices to recreate for this class)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmUsed()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'effect' in class_dict:
res = class_dict['effect'](old_obj, trans_dict)
new_obj.db_effect = res
elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
obj = old_obj.db_effect
new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
for obj in old_obj.db_deleted_effect:
n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict)
new_obj.db_deleted_effect.append(n_obj)
if 'role' in class_dict:
res = class_dict['role'](old_obj, trans_dict)
new_obj.db_role = res
elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
obj = old_obj.db_role
new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
for obj in old_obj.db_deleted_role:
n_obj = DBOpmRole.update_version(obj, trans_dict)
new_obj.db_deleted_role.append(n_obj)
if 'cause' in class_dict:
res = class_dict['cause'](old_obj, trans_dict)
new_obj.db_cause = res
elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
obj = old_obj.db_cause
new_obj.db_add_cause(DBOpmArtifactIdCause.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
for obj in old_obj.db_deleted_cause:
n_obj = DBOpmArtifactIdCause.update_version(obj, trans_dict)
new_obj.db_deleted_cause.append(n_obj)
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
if 'opm_times' in class_dict:
res = class_dict['opm_times'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_opm_time(obj)
elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
for obj in old_obj.db_opm_times:
new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
for obj in old_obj.db_deleted_opm_times:
n_obj = DBOpmTime.update_version(obj, trans_dict)
new_obj.db_deleted_opm_times.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_effect is not None:
children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_effect = None
if self._db_role is not None:
children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_role = None
if self._db_cause is not None:
children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_cause = None
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
to_del = []
for child in self.db_opm_times:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_opm_time(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_effect)
children.extend(self.db_deleted_role)
children.extend(self.db_deleted_cause)
children.extend(self.db_deleted_accounts)
children.extend(self.db_deleted_opm_times)
if remove:
self.db_deleted_effect = []
self.db_deleted_role = []
self.db_deleted_cause = []
self.db_deleted_accounts = []
self.db_deleted_opm_times = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_effect is not None and self._db_effect.has_changes():
return True
if self._db_role is not None and self._db_role.has_changes():
return True
if self._db_cause is not None and self._db_cause.has_changes():
return True
for child in self._db_accounts:
if child.has_changes():
return True
for child in self._db_opm_times:
if child.has_changes():
return True
return False
def __get_db_effect(self):
return self._db_effect
def __set_db_effect(self, effect):
self._db_effect = effect
self.is_dirty = True
db_effect = property(__get_db_effect, __set_db_effect)
def db_add_effect(self, effect):
self._db_effect = effect
def db_change_effect(self, effect):
self._db_effect = effect
def db_delete_effect(self, effect):
if not self.is_new:
self.db_deleted_effect.append(self._db_effect)
self._db_effect = None
def __get_db_role(self):
return self._db_role
def __set_db_role(self, role):
self._db_role = role
self.is_dirty = True
db_role = property(__get_db_role, __set_db_role)
def db_add_role(self, role):
self._db_role = role
def db_change_role(self, role):
self._db_role = role
def db_delete_role(self, role):
if not self.is_new:
self.db_deleted_role.append(self._db_role)
self._db_role = None
def __get_db_cause(self):
return self._db_cause
def __set_db_cause(self, cause):
self._db_cause = cause
self.is_dirty = True
db_cause = property(__get_db_cause, __set_db_cause)
def db_add_cause(self, cause):
self._db_cause = cause
def db_change_cause(self, cause):
self._db_cause = cause
def db_delete_cause(self, cause):
if not self.is_new:
self.db_deleted_cause.append(self._db_cause)
self._db_cause = None
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_change_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_delete_account(self, account):
        # unconditionally fails: the accounts list is non-keyed
        raise Exception('Cannot delete a non-keyed object')
def db_get_account(self, key):
return None
def __get_db_opm_times(self):
return self._db_opm_times
def __set_db_opm_times(self, opm_times):
self._db_opm_times = opm_times
self.is_dirty = True
db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
def db_get_opm_times(self):
return self._db_opm_times
def db_add_opm_time(self, opm_time):
self.is_dirty = True
self._db_opm_times.append(opm_time)
def db_change_opm_time(self, opm_time):
self.is_dirty = True
self._db_opm_times.append(opm_time)
def db_delete_opm_time(self, opm_time):
        # unconditionally fails: the opm_times list is non-keyed
        raise Exception('Cannot delete a non-keyed object')
def db_get_opm_time(self, key):
return None
class DBPluginData(object):
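    """Leaf object carrying an opaque plugin payload (vtType 'plugin_data')."""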
vtType = 'plugin_data'
def __init__(self, id=None, data=None):
self._db_id = id
self._db_data = data
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPluginData.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPluginData(id=self._db_id,
data=self._db_data)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
        # set flags (no indices to recreate for this class)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPluginData()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'data' in class_dict:
res = class_dict['data'](old_obj, trans_dict)
new_obj.db_data = res
elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
new_obj.db_data = old_obj.db_data
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_data(self):
return self._db_data
def __set_db_data(self, data):
self._db_data = data
self.is_dirty = True
db_data = property(__get_db_data, __set_db_data)
def db_add_data(self, data):
self._db_data = data
def db_change_data(self, data):
self._db_data = data
def db_delete_data(self, data):
self._db_data = None
def getPrimaryKey(self):
return self._db_id
class DBFunction(object):
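    """A named function with an ordered parameter list (vtType 'function').

    Parameters are keyed children: db_parameters_id_index mirrors the list
    for id lookups, db_change_parameter replaces in place (or appends when
    absent), and db_delete_parameter moves non-new entries onto
    db_deleted_parameters so pending deletes survive until flushed.
    """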
vtType = 'function'
def __init__(self, id=None, pos=None, name=None, parameters=None):
self._db_id = id
self._db_pos = pos
self._db_name = name
self.db_deleted_parameters = []
self.db_parameters_id_index = {}
if parameters is None:
self._db_parameters = []
else:
self._db_parameters = parameters
for v in self._db_parameters:
self.db_parameters_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBFunction.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBFunction(id=self._db_id,
pos=self._db_pos,
name=self._db_name)
if self._db_parameters is None:
cp._db_parameters = []
else:
cp._db_parameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameters]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_parameters_id_index = dict((v.db_id, v) for v in cp._db_parameters)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBFunction()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'pos' in class_dict:
res = class_dict['pos'](old_obj, trans_dict)
new_obj.db_pos = res
elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None:
new_obj.db_pos = old_obj.db_pos
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'parameters' in class_dict:
res = class_dict['parameters'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_parameter(obj)
elif hasattr(old_obj, 'db_parameters') and old_obj.db_parameters is not None:
for obj in old_obj.db_parameters:
new_obj.db_add_parameter(DBParameter.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_parameters') and hasattr(new_obj, 'db_deleted_parameters'):
for obj in old_obj.db_deleted_parameters:
n_obj = DBParameter.update_version(obj, trans_dict)
new_obj.db_deleted_parameters.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_parameters:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_parameter(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_parameters)
if remove:
self.db_deleted_parameters = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_parameters:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_pos(self):
return self._db_pos
def __set_db_pos(self, pos):
self._db_pos = pos
self.is_dirty = True
db_pos = property(__get_db_pos, __set_db_pos)
def db_add_pos(self, pos):
self._db_pos = pos
def db_change_pos(self, pos):
self._db_pos = pos
def db_delete_pos(self, pos):
self._db_pos = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_parameters(self):
return self._db_parameters
def __set_db_parameters(self, parameters):
self._db_parameters = parameters
self.is_dirty = True
db_parameters = property(__get_db_parameters, __set_db_parameters)
def db_get_parameters(self):
return self._db_parameters
def db_add_parameter(self, parameter):
self.is_dirty = True
self._db_parameters.append(parameter)
self.db_parameters_id_index[parameter.db_id] = parameter
def db_change_parameter(self, parameter):
self.is_dirty = True
found = False
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == parameter.db_id:
self._db_parameters[i] = parameter
found = True
break
if not found:
self._db_parameters.append(parameter)
self.db_parameters_id_index[parameter.db_id] = parameter
def db_delete_parameter(self, parameter):
self.is_dirty = True
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == parameter.db_id:
if not self._db_parameters[i].is_new:
self.db_deleted_parameters.append(self._db_parameters[i])
del self._db_parameters[i]
break
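        # note: this del raises KeyError if no parameter with that id was indexed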
del self.db_parameters_id_index[parameter.db_id]
def db_get_parameter(self, key):
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == key:
return self._db_parameters[i]
return None
def db_get_parameter_by_id(self, key):
return self.db_parameters_id_index[key]
def db_has_parameter_with_id(self, key):
return key in self.db_parameters_id_index
def getPrimaryKey(self):
return self._db_id
class DBAbstraction(object):
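    """An abstraction with a location, functions and annotations
    (vtType 'abstraction').

    Scalar fields identify it (cache, name, namespace, package, version,
    internal_version, tag); db_location is a single child, while functions
    and annotations are keyed child lists, annotations being indexed both
    by id and by key.
    """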
vtType = 'abstraction'
def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, internal_version=None, tag=None, location=None, functions=None, annotations=None):
self._db_id = id
self._db_cache = cache
self._db_name = name
self._db_namespace = namespace
self._db_package = package
self._db_version = version
self._db_internal_version = internal_version
self._db_tag = tag
self.db_deleted_location = []
self._db_location = location
self.db_deleted_functions = []
self.db_functions_id_index = {}
if functions is None:
self._db_functions = []
else:
self._db_functions = functions
for v in self._db_functions:
self.db_functions_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAbstraction.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAbstraction(id=self._db_id,
cache=self._db_cache,
name=self._db_name,
namespace=self._db_namespace,
package=self._db_package,
version=self._db_version,
internal_version=self._db_internal_version,
tag=self._db_tag)
if self._db_location is not None:
cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
if self._db_functions is None:
cp._db_functions = []
else:
cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAbstraction()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'cache' in class_dict:
res = class_dict['cache'](old_obj, trans_dict)
new_obj.db_cache = res
elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
new_obj.db_cache = old_obj.db_cache
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'namespace' in class_dict:
res = class_dict['namespace'](old_obj, trans_dict)
new_obj.db_namespace = res
elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
new_obj.db_namespace = old_obj.db_namespace
if 'package' in class_dict:
res = class_dict['package'](old_obj, trans_dict)
new_obj.db_package = res
elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
new_obj.db_package = old_obj.db_package
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'internal_version' in class_dict:
res = class_dict['internal_version'](old_obj, trans_dict)
new_obj.db_internal_version = res
elif hasattr(old_obj, 'db_internal_version') and old_obj.db_internal_version is not None:
new_obj.db_internal_version = old_obj.db_internal_version
if 'tag' in class_dict:
res = class_dict['tag'](old_obj, trans_dict)
new_obj.db_tag = res
elif hasattr(old_obj, 'db_tag') and old_obj.db_tag is not None:
new_obj.db_tag = old_obj.db_tag
if 'location' in class_dict:
res = class_dict['location'](old_obj, trans_dict)
new_obj.db_location = res
elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
obj = old_obj.db_location
new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
for obj in old_obj.db_deleted_location:
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_location.append(n_obj)
if 'functions' in class_dict:
res = class_dict['functions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_function(obj)
elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
for obj in old_obj.db_functions:
new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
for obj in old_obj.db_deleted_functions:
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_functions.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_location is not None:
children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_location = None
to_del = []
for child in self.db_functions:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_function(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_location)
children.extend(self.db_deleted_functions)
children.extend(self.db_deleted_annotations)
if remove:
self.db_deleted_location = []
self.db_deleted_functions = []
self.db_deleted_annotations = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_location is not None and self._db_location.has_changes():
return True
for child in self._db_functions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_cache(self):
return self._db_cache
def __set_db_cache(self, cache):
self._db_cache = cache
self.is_dirty = True
db_cache = property(__get_db_cache, __set_db_cache)
def db_add_cache(self, cache):
self._db_cache = cache
def db_change_cache(self, cache):
self._db_cache = cache
def db_delete_cache(self, cache):
self._db_cache = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_namespace(self):
return self._db_namespace
def __set_db_namespace(self, namespace):
self._db_namespace = namespace
self.is_dirty = True
db_namespace = property(__get_db_namespace, __set_db_namespace)
def db_add_namespace(self, namespace):
self._db_namespace = namespace
def db_change_namespace(self, namespace):
self._db_namespace = namespace
def db_delete_namespace(self, namespace):
self._db_namespace = None
def __get_db_package(self):
return self._db_package
def __set_db_package(self, package):
self._db_package = package
self.is_dirty = True
db_package = property(__get_db_package, __set_db_package)
def db_add_package(self, package):
self._db_package = package
def db_change_package(self, package):
self._db_package = package
def db_delete_package(self, package):
self._db_package = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_internal_version(self):
return self._db_internal_version
def __set_db_internal_version(self, internal_version):
self._db_internal_version = internal_version
self.is_dirty = True
db_internal_version = property(__get_db_internal_version, __set_db_internal_version)
def db_add_internal_version(self, internal_version):
self._db_internal_version = internal_version
def db_change_internal_version(self, internal_version):
self._db_internal_version = internal_version
def db_delete_internal_version(self, internal_version):
self._db_internal_version = None
def __get_db_tag(self):
return self._db_tag
def __set_db_tag(self, tag):
self._db_tag = tag
self.is_dirty = True
db_tag = property(__get_db_tag, __set_db_tag)
def db_add_tag(self, tag):
self._db_tag = tag
def db_change_tag(self, tag):
self._db_tag = tag
def db_delete_tag(self, tag):
self._db_tag = None
def __get_db_location(self):
return self._db_location
def __set_db_location(self, location):
self._db_location = location
self.is_dirty = True
db_location = property(__get_db_location, __set_db_location)
def db_add_location(self, location):
self._db_location = location
def db_change_location(self, location):
self._db_location = location
def db_delete_location(self, location):
if not self.is_new:
self.db_deleted_location.append(self._db_location)
self._db_location = None
def __get_db_functions(self):
return self._db_functions
def __set_db_functions(self, functions):
self._db_functions = functions
self.is_dirty = True
db_functions = property(__get_db_functions, __set_db_functions)
def db_get_functions(self):
return self._db_functions
def db_add_function(self, function):
self.is_dirty = True
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_change_function(self, function):
self.is_dirty = True
found = False
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
self._db_functions[i] = function
found = True
break
if not found:
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_delete_function(self, function):
self.is_dirty = True
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
if not self._db_functions[i].is_new:
self.db_deleted_functions.append(self._db_functions[i])
del self._db_functions[i]
break
del self.db_functions_id_index[function.db_id]
def db_get_function(self, key):
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == key:
return self._db_functions[i]
return None
def db_get_function_by_id(self, key):
return self.db_functions_id_index[key]
def db_has_function_with_id(self, key):
return key in self.db_functions_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def getPrimaryKey(self):
return self._db_id
class DBWorkflow(object):
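    """Auto-generated persistence class for a workflow.

    Holds collections of modules, connections, annotations, plugin_datas
    and others, each mirrored by a db_*_id_index dict for O(1) lookup,
    plus scalar fields (id, entity_type, name, version, last_modified,
    vistrail_id).  Objects removed through the db_delete_* methods are
    kept in the db_deleted_* lists until db_deleted_children(remove=True)
    flushes them, so the database layer can issue the matching deletes.
    """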
vtType = 'workflow'
def __init__(self, modules=None, id=None, entity_type=None, name=None, version=None, last_modified=None, connections=None, annotations=None, plugin_datas=None, others=None, vistrail_id=None):
self.db_deleted_modules = []
self.db_modules_id_index = {}
if modules is None:
self._db_modules = []
else:
self._db_modules = modules
for v in self._db_modules:
self.db_modules_id_index[v.db_id] = v
self._db_id = id
self._db_entity_type = entity_type
self._db_name = name
self._db_version = version
self._db_last_modified = last_modified
self.db_deleted_connections = []
self.db_connections_id_index = {}
if connections is None:
self._db_connections = []
else:
self._db_connections = connections
for v in self._db_connections:
self.db_connections_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_deleted_plugin_datas = []
self.db_plugin_datas_id_index = {}
if plugin_datas is None:
self._db_plugin_datas = []
else:
self._db_plugin_datas = plugin_datas
for v in self._db_plugin_datas:
self.db_plugin_datas_id_index[v.db_id] = v
self.db_deleted_others = []
self.db_others_id_index = {}
if others is None:
self._db_others = []
else:
self._db_others = others
for v in self._db_others:
self.db_others_id_index[v.db_id] = v
self._db_vistrail_id = vistrail_id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBWorkflow.do_copy(self)
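    # Copy protocol used throughout this module: do_copy(new_ids, id_scope,
    # id_remap) deep-copies the object and its children.  When new_ids is
    # True, the copy asks id_scope.getNewId(self.vtType) for a fresh id and
    # records (vtType, old_id) -> new_id in id_remap so foreign-key fields
    # (here, vistrail_id) can be rewritten to follow the remapped objects.
    # Illustrative call, assuming the caller supplies an id scope:
    #     remap = {}
    #     wf_copy = workflow.do_copy(new_ids=True, id_scope=scope, id_remap=remap)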
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBWorkflow(id=self._db_id,
entity_type=self._db_entity_type,
name=self._db_name,
version=self._db_version,
last_modified=self._db_last_modified,
vistrail_id=self._db_vistrail_id)
if self._db_modules is None:
cp._db_modules = []
else:
cp._db_modules = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_modules]
if self._db_connections is None:
cp._db_connections = []
else:
cp._db_connections = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_connections]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_plugin_datas is None:
cp._db_plugin_datas = []
else:
cp._db_plugin_datas = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_plugin_datas]
if self._db_others is None:
cp._db_others = []
else:
cp._db_others = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_others]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]
# recreate indices and set flags
cp.db_modules_id_index = dict((v.db_id, v) for v in cp._db_modules)
cp.db_connections_id_index = dict((v.db_id, v) for v in cp._db_connections)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_plugin_datas_id_index = dict((v.db_id, v) for v in cp._db_plugin_datas)
cp.db_others_id_index = dict((v.db_id, v) for v in cp._db_others)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
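    # update_version translates an object from an older schema version:
    # trans_dict maps class names to dicts of per-field converter
    # callables; any field without an override is copied (and recursively
    # upgraded) from old_obj as-is.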
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBWorkflow()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'modules' in class_dict:
res = class_dict['modules'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_module(obj)
elif hasattr(old_obj, 'db_modules') and old_obj.db_modules is not None:
for obj in old_obj.db_modules:
if obj.vtType == 'module':
new_obj.db_add_module(DBModule.update_version(obj, trans_dict))
elif obj.vtType == 'abstraction':
new_obj.db_add_module(DBAbstraction.update_version(obj, trans_dict))
elif obj.vtType == 'group':
new_obj.db_add_module(DBGroup.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_modules') and hasattr(new_obj, 'db_deleted_modules'):
for obj in old_obj.db_deleted_modules:
if obj.vtType == 'module':
n_obj = DBModule.update_version(obj, trans_dict)
new_obj.db_deleted_modules.append(n_obj)
elif obj.vtType == 'abstraction':
n_obj = DBAbstraction.update_version(obj, trans_dict)
new_obj.db_deleted_modules.append(n_obj)
elif obj.vtType == 'group':
n_obj = DBGroup.update_version(obj, trans_dict)
new_obj.db_deleted_modules.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'entity_type' in class_dict:
res = class_dict['entity_type'](old_obj, trans_dict)
new_obj.db_entity_type = res
elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
new_obj.db_entity_type = old_obj.db_entity_type
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'connections' in class_dict:
res = class_dict['connections'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_connection(obj)
elif hasattr(old_obj, 'db_connections') and old_obj.db_connections is not None:
for obj in old_obj.db_connections:
new_obj.db_add_connection(DBConnection.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_connections') and hasattr(new_obj, 'db_deleted_connections'):
for obj in old_obj.db_deleted_connections:
n_obj = DBConnection.update_version(obj, trans_dict)
new_obj.db_deleted_connections.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'plugin_datas' in class_dict:
res = class_dict['plugin_datas'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_plugin_data(obj)
elif hasattr(old_obj, 'db_plugin_datas') and old_obj.db_plugin_datas is not None:
for obj in old_obj.db_plugin_datas:
new_obj.db_add_plugin_data(DBPluginData.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_plugin_datas') and hasattr(new_obj, 'db_deleted_plugin_datas'):
for obj in old_obj.db_deleted_plugin_datas:
n_obj = DBPluginData.update_version(obj, trans_dict)
new_obj.db_deleted_plugin_datas.append(n_obj)
if 'others' in class_dict:
res = class_dict['others'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_other(obj)
elif hasattr(old_obj, 'db_others') and old_obj.db_others is not None:
for obj in old_obj.db_others:
new_obj.db_add_other(DBOther.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_others') and hasattr(new_obj, 'db_deleted_others'):
for obj in old_obj.db_deleted_others:
n_obj = DBOther.update_version(obj, trans_dict)
new_obj.db_deleted_others.append(n_obj)
if 'vistrail_id' in class_dict:
res = class_dict['vistrail_id'](old_obj, trans_dict)
new_obj.db_vistrail_id = res
elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
new_obj.db_vistrail_id = old_obj.db_vistrail_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
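    # db_children returns (object, parent_type, parent_id) tuples for this
    # object and everything below it, depth-first with the object itself
    # appended last.  With orphan=True the children are also detached from
    # their parent collections as they are collected.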
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_connections:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_connection(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_plugin_datas:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_plugin_data(child)
to_del = []
for child in self.db_others:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_other(child)
to_del = []
for child in self.db_modules:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_module(child)
children.append((self, parent[0], parent[1]))
return children
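    # db_deleted_children collects the objects queued for deletion by the
    # db_delete_* methods; remove=True empties the queues, so callers
    # typically drain them once per save.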
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_connections)
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_plugin_datas)
children.extend(self.db_deleted_others)
children.extend(self.db_deleted_modules)
if remove:
self.db_deleted_connections = []
self.db_deleted_annotations = []
self.db_deleted_plugin_datas = []
self.db_deleted_others = []
self.db_deleted_modules = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_connections:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_plugin_datas:
if child.has_changes():
return True
for child in self._db_others:
if child.has_changes():
return True
for child in self._db_modules:
if child.has_changes():
return True
return False
def __get_db_modules(self):
return self._db_modules
def __set_db_modules(self, modules):
self._db_modules = modules
self.is_dirty = True
db_modules = property(__get_db_modules, __set_db_modules)
def db_get_modules(self):
return self._db_modules
def db_add_module(self, module):
self.is_dirty = True
self._db_modules.append(module)
self.db_modules_id_index[module.db_id] = module
def db_change_module(self, module):
self.is_dirty = True
found = False
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == module.db_id:
self._db_modules[i] = module
found = True
break
if not found:
self._db_modules.append(module)
self.db_modules_id_index[module.db_id] = module
def db_delete_module(self, module):
self.is_dirty = True
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == module.db_id:
if not self._db_modules[i].is_new:
self.db_deleted_modules.append(self._db_modules[i])
del self._db_modules[i]
break
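        # note: this del raises KeyError if the module was never indexed,
        # which doubles as a check that the module actually existed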
del self.db_modules_id_index[module.db_id]
def db_get_module(self, key):
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == key:
return self._db_modules[i]
return None
def db_get_module_by_id(self, key):
return self.db_modules_id_index[key]
def db_has_module_with_id(self, key):
return key in self.db_modules_id_index
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_entity_type(self):
return self._db_entity_type
def __set_db_entity_type(self, entity_type):
self._db_entity_type = entity_type
self.is_dirty = True
db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
def db_add_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_change_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_delete_entity_type(self, entity_type):
self._db_entity_type = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_connections(self):
return self._db_connections
def __set_db_connections(self, connections):
self._db_connections = connections
self.is_dirty = True
db_connections = property(__get_db_connections, __set_db_connections)
def db_get_connections(self):
return self._db_connections
def db_add_connection(self, connection):
self.is_dirty = True
self._db_connections.append(connection)
self.db_connections_id_index[connection.db_id] = connection
def db_change_connection(self, connection):
self.is_dirty = True
found = False
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == connection.db_id:
self._db_connections[i] = connection
found = True
break
if not found:
self._db_connections.append(connection)
self.db_connections_id_index[connection.db_id] = connection
def db_delete_connection(self, connection):
self.is_dirty = True
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == connection.db_id:
if not self._db_connections[i].is_new:
self.db_deleted_connections.append(self._db_connections[i])
del self._db_connections[i]
break
del self.db_connections_id_index[connection.db_id]
def db_get_connection(self, key):
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == key:
return self._db_connections[i]
return None
def db_get_connection_by_id(self, key):
return self.db_connections_id_index[key]
def db_has_connection_with_id(self, key):
return key in self.db_connections_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def __get_db_plugin_datas(self):
return self._db_plugin_datas
def __set_db_plugin_datas(self, plugin_datas):
self._db_plugin_datas = plugin_datas
self.is_dirty = True
db_plugin_datas = property(__get_db_plugin_datas, __set_db_plugin_datas)
def db_get_plugin_datas(self):
return self._db_plugin_datas
def db_add_plugin_data(self, plugin_data):
self.is_dirty = True
self._db_plugin_datas.append(plugin_data)
self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
def db_change_plugin_data(self, plugin_data):
self.is_dirty = True
found = False
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == plugin_data.db_id:
self._db_plugin_datas[i] = plugin_data
found = True
break
if not found:
self._db_plugin_datas.append(plugin_data)
self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
def db_delete_plugin_data(self, plugin_data):
self.is_dirty = True
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == plugin_data.db_id:
if not self._db_plugin_datas[i].is_new:
self.db_deleted_plugin_datas.append(self._db_plugin_datas[i])
del self._db_plugin_datas[i]
break
del self.db_plugin_datas_id_index[plugin_data.db_id]
def db_get_plugin_data(self, key):
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == key:
return self._db_plugin_datas[i]
return None
def db_get_plugin_data_by_id(self, key):
return self.db_plugin_datas_id_index[key]
def db_has_plugin_data_with_id(self, key):
return key in self.db_plugin_datas_id_index
def __get_db_others(self):
return self._db_others
def __set_db_others(self, others):
self._db_others = others
self.is_dirty = True
db_others = property(__get_db_others, __set_db_others)
def db_get_others(self):
return self._db_others
def db_add_other(self, other):
self.is_dirty = True
self._db_others.append(other)
self.db_others_id_index[other.db_id] = other
def db_change_other(self, other):
self.is_dirty = True
found = False
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == other.db_id:
self._db_others[i] = other
found = True
break
if not found:
self._db_others.append(other)
self.db_others_id_index[other.db_id] = other
def db_delete_other(self, other):
self.is_dirty = True
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == other.db_id:
if not self._db_others[i].is_new:
self.db_deleted_others.append(self._db_others[i])
del self._db_others[i]
break
del self.db_others_id_index[other.db_id]
def db_get_other(self, key):
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == key:
return self._db_others[i]
return None
def db_get_other_by_id(self, key):
return self.db_others_id_index[key]
def db_has_other_with_id(self, key):
return key in self.db_others_id_index
def __get_db_vistrail_id(self):
return self._db_vistrail_id
def __set_db_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
self.is_dirty = True
db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
def db_add_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
def db_change_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
def db_delete_vistrail_id(self, vistrail_id):
self._db_vistrail_id = None
def getPrimaryKey(self):
return self._db_id
class DBOpmArtifactIdCause(object):
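    """Reference to the artifact on the cause side of an OPM dependency.

    Wraps a single id; do_copy rewrites it through the
    ('opm_artifact', id) entry of id_remap so the reference follows the
    copied artifact.
    """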
vtType = 'opm_artifact_id_cause'
def __init__(self, id=None):
self._db_id = id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmArtifactIdCause.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmArtifactIdCause(id=self._db_id)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('opm_artifact', self._db_id) in id_remap:
cp._db_id = id_remap[('opm_artifact', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmArtifactIdCause()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
class DBOpmArtifactValue(object):
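    """Value payload of an OPM artifact.

    db_value holds a single child object (a portSpec or a function, per
    the update_version dispatch below); deleting it records the old value
    in db_deleted_value unless the object is still new.
    """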
vtType = 'opm_artifact_value'
def __init__(self, value=None):
self.db_deleted_value = []
self._db_value = value
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmArtifactValue.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmArtifactValue()
if self._db_value is not None:
cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
# set new ids
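        # note: this class stores no id of its own, so the new_ids branch
        # below would fail on self.db_id; in practice OPM objects appear
        # to be copied without id remapping (new_ids=False)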
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmArtifactValue()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
obj = old_obj.db_value
if obj.vtType == 'portSpec':
new_obj.db_add_value(DBPortSpec.update_version(obj, trans_dict))
elif obj.vtType == 'function':
new_obj.db_add_value(DBFunction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
for obj in old_obj.db_deleted_value:
if obj.vtType == 'portSpec':
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
elif obj.vtType == 'function':
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_value is not None:
children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_value = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_value)
if remove:
self.db_deleted_value = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_value is not None and self._db_value.has_changes():
return True
return False
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
if not self.is_new:
self.db_deleted_value.append(self._db_value)
self._db_value = None
class DBOpmArtifactIdEffect(object):
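    """Reference to the artifact on the effect side of an OPM dependency.

    Counterpart of DBOpmArtifactIdCause; do_copy remaps the id through
    the same ('opm_artifact', id) entry of id_remap.
    """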
vtType = 'opm_artifact_id_effect'
def __init__(self, id=None):
self._db_id = id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmArtifactIdEffect.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmArtifactIdEffect(id=self._db_id)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('opm_artifact', self._db_id) in id_remap:
cp._db_id = id_remap[('opm_artifact', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmArtifactIdEffect()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
class DBOpmGraph(object):
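    """Top-level OPM (Open Provenance Model) document.

    Aggregates the accounts, processes, artifacts, agents and
    dependencies containers; each is a single child object rather than a
    list, so db_children simply recurses into whichever are present.
    """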
vtType = 'opm_graph'
def __init__(self, accounts=None, processes=None, artifacts=None, agents=None, dependencies=None):
self.db_deleted_accounts = []
self._db_accounts = accounts
self.db_deleted_processes = []
self._db_processes = processes
self.db_deleted_artifacts = []
self._db_artifacts = artifacts
self.db_deleted_agents = []
self._db_agents = agents
self.db_deleted_dependencies = []
self._db_dependencies = dependencies
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmGraph.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmGraph()
if self._db_accounts is not None:
cp._db_accounts = self._db_accounts.do_copy(new_ids, id_scope, id_remap)
if self._db_processes is not None:
cp._db_processes = self._db_processes.do_copy(new_ids, id_scope, id_remap)
if self._db_artifacts is not None:
cp._db_artifacts = self._db_artifacts.do_copy(new_ids, id_scope, id_remap)
if self._db_agents is not None:
cp._db_agents = self._db_agents.do_copy(new_ids, id_scope, id_remap)
if self._db_dependencies is not None:
cp._db_dependencies = self._db_dependencies.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmGraph()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
new_obj.db_accounts = res
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
obj = old_obj.db_accounts
new_obj.db_add_accounts(DBOpmAccounts.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccounts.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
if 'processes' in class_dict:
res = class_dict['processes'](old_obj, trans_dict)
new_obj.db_processes = res
elif hasattr(old_obj, 'db_processes') and old_obj.db_processes is not None:
obj = old_obj.db_processes
new_obj.db_add_processes(DBOpmProcesses.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_processes') and hasattr(new_obj, 'db_deleted_processes'):
for obj in old_obj.db_deleted_processes:
n_obj = DBOpmProcesses.update_version(obj, trans_dict)
new_obj.db_deleted_processes.append(n_obj)
if 'artifacts' in class_dict:
res = class_dict['artifacts'](old_obj, trans_dict)
new_obj.db_artifacts = res
elif hasattr(old_obj, 'db_artifacts') and old_obj.db_artifacts is not None:
obj = old_obj.db_artifacts
new_obj.db_add_artifacts(DBOpmArtifacts.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_artifacts') and hasattr(new_obj, 'db_deleted_artifacts'):
for obj in old_obj.db_deleted_artifacts:
n_obj = DBOpmArtifacts.update_version(obj, trans_dict)
new_obj.db_deleted_artifacts.append(n_obj)
if 'agents' in class_dict:
res = class_dict['agents'](old_obj, trans_dict)
new_obj.db_agents = res
elif hasattr(old_obj, 'db_agents') and old_obj.db_agents is not None:
obj = old_obj.db_agents
new_obj.db_add_agents(DBOpmAgents.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_agents') and hasattr(new_obj, 'db_deleted_agents'):
for obj in old_obj.db_deleted_agents:
n_obj = DBOpmAgents.update_version(obj, trans_dict)
new_obj.db_deleted_agents.append(n_obj)
if 'dependencies' in class_dict:
res = class_dict['dependencies'](old_obj, trans_dict)
new_obj.db_dependencies = res
elif hasattr(old_obj, 'db_dependencies') and old_obj.db_dependencies is not None:
obj = old_obj.db_dependencies
new_obj.db_add_dependencies(DBOpmDependencies.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_dependencies') and hasattr(new_obj, 'db_deleted_dependencies'):
for obj in old_obj.db_deleted_dependencies:
n_obj = DBOpmDependencies.update_version(obj, trans_dict)
new_obj.db_deleted_dependencies.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_accounts is not None:
children.extend(self._db_accounts.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_accounts = None
if self._db_processes is not None:
children.extend(self._db_processes.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_processes = None
if self._db_artifacts is not None:
children.extend(self._db_artifacts.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_artifacts = None
if self._db_agents is not None:
children.extend(self._db_agents.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_agents = None
if self._db_dependencies is not None:
children.extend(self._db_dependencies.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_dependencies = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_accounts)
children.extend(self.db_deleted_processes)
children.extend(self.db_deleted_artifacts)
children.extend(self.db_deleted_agents)
children.extend(self.db_deleted_dependencies)
if remove:
self.db_deleted_accounts = []
self.db_deleted_processes = []
self.db_deleted_artifacts = []
self.db_deleted_agents = []
self.db_deleted_dependencies = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_accounts is not None and self._db_accounts.has_changes():
return True
if self._db_processes is not None and self._db_processes.has_changes():
return True
if self._db_artifacts is not None and self._db_artifacts.has_changes():
return True
if self._db_agents is not None and self._db_agents.has_changes():
return True
if self._db_dependencies is not None and self._db_dependencies.has_changes():
return True
return False
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_add_accounts(self, accounts):
self._db_accounts = accounts
def db_change_accounts(self, accounts):
self._db_accounts = accounts
def db_delete_accounts(self, accounts):
if not self.is_new:
self.db_deleted_accounts.append(self._db_accounts)
self._db_accounts = None
def __get_db_processes(self):
return self._db_processes
def __set_db_processes(self, processes):
self._db_processes = processes
self.is_dirty = True
db_processes = property(__get_db_processes, __set_db_processes)
def db_add_processes(self, processes):
self._db_processes = processes
def db_change_processes(self, processes):
self._db_processes = processes
def db_delete_processes(self, processes):
if not self.is_new:
self.db_deleted_processes.append(self._db_processes)
self._db_processes = None
def __get_db_artifacts(self):
return self._db_artifacts
def __set_db_artifacts(self, artifacts):
self._db_artifacts = artifacts
self.is_dirty = True
db_artifacts = property(__get_db_artifacts, __set_db_artifacts)
def db_add_artifacts(self, artifacts):
self._db_artifacts = artifacts
def db_change_artifacts(self, artifacts):
self._db_artifacts = artifacts
def db_delete_artifacts(self, artifacts):
if not self.is_new:
self.db_deleted_artifacts.append(self._db_artifacts)
self._db_artifacts = None
def __get_db_agents(self):
return self._db_agents
def __set_db_agents(self, agents):
self._db_agents = agents
self.is_dirty = True
db_agents = property(__get_db_agents, __set_db_agents)
def db_add_agents(self, agents):
self._db_agents = agents
def db_change_agents(self, agents):
self._db_agents = agents
def db_delete_agents(self, agents):
if not self.is_new:
self.db_deleted_agents.append(self._db_agents)
self._db_agents = None
def __get_db_dependencies(self):
return self._db_dependencies
def __set_db_dependencies(self, dependencies):
self._db_dependencies = dependencies
self.is_dirty = True
db_dependencies = property(__get_db_dependencies, __set_db_dependencies)
def db_add_dependencies(self, dependencies):
self._db_dependencies = dependencies
def db_change_dependencies(self, dependencies):
self._db_dependencies = dependencies
def db_delete_dependencies(self, dependencies):
if not self.is_new:
self.db_deleted_dependencies.append(self._db_dependencies)
self._db_dependencies = None
class DBRegistry(object):
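    """Auto-generated persistence class for the module registry.

    Keeps the list of packages indexed both by id and by the
    (identifier, version) pair, so a package can be fetched either way
    via db_get_package_by_id or db_get_package_by_identifier.
    """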
vtType = 'registry'
def __init__(self, id=None, entity_type=None, version=None, root_descriptor_id=None, name=None, last_modified=None, packages=None):
self._db_id = id
self._db_entity_type = entity_type
self._db_version = version
self._db_root_descriptor_id = root_descriptor_id
self._db_name = name
self._db_last_modified = last_modified
self.db_deleted_packages = []
self.db_packages_id_index = {}
self.db_packages_identifier_index = {}
if packages is None:
self._db_packages = []
else:
self._db_packages = packages
for v in self._db_packages:
self.db_packages_id_index[v.db_id] = v
self.db_packages_identifier_index[(v.db_identifier,v.db_version)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBRegistry.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBRegistry(id=self._db_id,
entity_type=self._db_entity_type,
version=self._db_version,
root_descriptor_id=self._db_root_descriptor_id,
name=self._db_name,
last_modified=self._db_last_modified)
if self._db_packages is None:
cp._db_packages = []
else:
cp._db_packages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_packages]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_root_descriptor_id') and ('module_descriptor', self._db_root_descriptor_id) in id_remap:
cp._db_root_descriptor_id = id_remap[('module_descriptor', self._db_root_descriptor_id)]
# recreate indices and set flags
cp.db_packages_id_index = dict((v.db_id, v) for v in cp._db_packages)
cp.db_packages_identifier_index = dict(((v.db_identifier,v.db_version), v) for v in cp._db_packages)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBRegistry()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'entity_type' in class_dict:
res = class_dict['entity_type'](old_obj, trans_dict)
new_obj.db_entity_type = res
elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
new_obj.db_entity_type = old_obj.db_entity_type
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'root_descriptor_id' in class_dict:
res = class_dict['root_descriptor_id'](old_obj, trans_dict)
new_obj.db_root_descriptor_id = res
elif hasattr(old_obj, 'db_root_descriptor_id') and old_obj.db_root_descriptor_id is not None:
new_obj.db_root_descriptor_id = old_obj.db_root_descriptor_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'packages' in class_dict:
res = class_dict['packages'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_package(obj)
elif hasattr(old_obj, 'db_packages') and old_obj.db_packages is not None:
for obj in old_obj.db_packages:
new_obj.db_add_package(DBPackage.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'):
for obj in old_obj.db_deleted_packages:
n_obj = DBPackage.update_version(obj, trans_dict)
new_obj.db_deleted_packages.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_packages:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_package(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_packages)
if remove:
self.db_deleted_packages = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_packages:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_entity_type(self):
return self._db_entity_type
def __set_db_entity_type(self, entity_type):
self._db_entity_type = entity_type
self.is_dirty = True
db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
def db_add_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_change_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_delete_entity_type(self, entity_type):
self._db_entity_type = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_root_descriptor_id(self):
return self._db_root_descriptor_id
def __set_db_root_descriptor_id(self, root_descriptor_id):
self._db_root_descriptor_id = root_descriptor_id
self.is_dirty = True
db_root_descriptor_id = property(__get_db_root_descriptor_id, __set_db_root_descriptor_id)
def db_add_root_descriptor_id(self, root_descriptor_id):
self._db_root_descriptor_id = root_descriptor_id
def db_change_root_descriptor_id(self, root_descriptor_id):
self._db_root_descriptor_id = root_descriptor_id
def db_delete_root_descriptor_id(self, root_descriptor_id):
self._db_root_descriptor_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_packages(self):
return self._db_packages
def __set_db_packages(self, packages):
self._db_packages = packages
self.is_dirty = True
db_packages = property(__get_db_packages, __set_db_packages)
def db_get_packages(self):
return self._db_packages
def db_add_package(self, package):
self.is_dirty = True
self._db_packages.append(package)
self.db_packages_id_index[package.db_id] = package
self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package
def db_change_package(self, package):
self.is_dirty = True
found = False
for i in xrange(len(self._db_packages)):
if self._db_packages[i].db_id == package.db_id:
self._db_packages[i] = package
found = True
break
if not found:
self._db_packages.append(package)
self.db_packages_id_index[package.db_id] = package
self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package
def db_delete_package(self, package):
self.is_dirty = True
for i in xrange(len(self._db_packages)):
if self._db_packages[i].db_id == package.db_id:
if not self._db_packages[i].is_new:
self.db_deleted_packages.append(self._db_packages[i])
del self._db_packages[i]
break
del self.db_packages_id_index[package.db_id]
del self.db_packages_identifier_index[(package.db_identifier,package.db_version)]
def db_get_package(self, key):
for i in xrange(len(self._db_packages)):
if self._db_packages[i].db_id == key:
return self._db_packages[i]
return None
def db_get_package_by_id(self, key):
return self.db_packages_id_index[key]
def db_has_package_with_id(self, key):
return key in self.db_packages_id_index
def db_get_package_by_identifier(self, key):
return self.db_packages_identifier_index[key]
def db_has_package_with_identifier(self, key):
return key in self.db_packages_identifier_index
def getPrimaryKey(self):
return self._db_id
class DBOpmAccount(object):
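    """An OPM account: a plain (id, value) leaf with no children."""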
vtType = 'opm_account'
def __init__(self, id=None, value=None):
self._db_id = id
self._db_value = value
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmAccount.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmAccount(id=self._db_id,
value=self._db_value)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmAccount()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
new_obj.db_value = old_obj.db_value
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
self._db_value = None
def getPrimaryKey(self):
return self._db_id
class DBAnnotation(object):
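    """A key/value annotation attached to other domain objects.

    Leaf object: db_children returns only itself and
    db_deleted_children is always empty.
    """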
vtType = 'annotation'
def __init__(self, id=None, key=None, value=None):
self._db_id = id
self._db_key = key
self._db_value = value
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAnnotation.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAnnotation(id=self._db_id,
key=self._db_key,
value=self._db_value)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAnnotation()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'key' in class_dict:
res = class_dict['key'](old_obj, trans_dict)
new_obj.db_key = res
elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None:
new_obj.db_key = old_obj.db_key
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
new_obj.db_value = old_obj.db_value
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_key(self):
return self._db_key
def __set_db_key(self, key):
self._db_key = key
self.is_dirty = True
db_key = property(__get_db_key, __set_db_key)
def db_add_key(self, key):
self._db_key = key
def db_change_key(self, key):
self._db_key = key
def db_delete_key(self, key):
self._db_key = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
self._db_value = None
def getPrimaryKey(self):
return self._db_id
class DBChange(object):
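    """A change action wrapping the object being modified.

    db_data holds the new child object (module, location, annotation,
    function, connection, port, parameter, portSpec, abstraction, group,
    other or plugin_data); oldObjId/newObjId/parentObjId locate the
    change inside the parent, and do_copy remaps those ids using db_what
    and db_parentObjType as the id-type keys.
    """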
vtType = 'change'
def __init__(self, data=None, id=None, what=None, oldObjId=None, newObjId=None, parentObjId=None, parentObjType=None):
self.db_deleted_data = []
self._db_data = data
self._db_id = id
self._db_what = what
self._db_oldObjId = oldObjId
self._db_newObjId = newObjId
self._db_parentObjId = parentObjId
self._db_parentObjType = parentObjType
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBChange.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBChange(id=self._db_id,
what=self._db_what,
oldObjId=self._db_oldObjId,
newObjId=self._db_newObjId,
parentObjId=self._db_parentObjId,
parentObjType=self._db_parentObjType)
if self._db_data is not None:
cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_oldObjId') and (self._db_what, self._db_oldObjId) in id_remap:
cp._db_oldObjId = id_remap[(self._db_what, self._db_oldObjId)]
if hasattr(self, 'db_newObjId') and (self._db_what, self._db_newObjId) in id_remap:
cp._db_newObjId = id_remap[(self._db_what, self._db_newObjId)]
if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
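    # update_version for DBChange dispatches on the payload's vtType to
    # pick the matching DB* class for the recursive upgrade, both for the
    # live db_data child and for the entries in db_deleted_data.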
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBChange()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'data' in class_dict:
res = class_dict['data'](old_obj, trans_dict)
new_obj.db_data = res
elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
obj = old_obj.db_data
if obj.vtType == 'module':
new_obj.db_add_data(DBModule.update_version(obj, trans_dict))
elif obj.vtType == 'location':
new_obj.db_add_data(DBLocation.update_version(obj, trans_dict))
elif obj.vtType == 'annotation':
new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict))
elif obj.vtType == 'function':
new_obj.db_add_data(DBFunction.update_version(obj, trans_dict))
elif obj.vtType == 'connection':
new_obj.db_add_data(DBConnection.update_version(obj, trans_dict))
elif obj.vtType == 'port':
new_obj.db_add_data(DBPort.update_version(obj, trans_dict))
elif obj.vtType == 'parameter':
new_obj.db_add_data(DBParameter.update_version(obj, trans_dict))
elif obj.vtType == 'portSpec':
new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict))
elif obj.vtType == 'abstraction':
new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict))
elif obj.vtType == 'group':
new_obj.db_add_data(DBGroup.update_version(obj, trans_dict))
elif obj.vtType == 'other':
new_obj.db_add_data(DBOther.update_version(obj, trans_dict))
elif obj.vtType == 'plugin_data':
new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'):
for obj in old_obj.db_deleted_data:
if obj.vtType == 'module':
n_obj = DBModule.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'location':
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'annotation':
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'function':
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'connection':
n_obj = DBConnection.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'port':
n_obj = DBPort.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'parameter':
n_obj = DBParameter.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'portSpec':
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'abstraction':
n_obj = DBAbstraction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'group':
n_obj = DBGroup.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'other':
n_obj = DBOther.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'plugin_data':
n_obj = DBPluginData.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'what' in class_dict:
res = class_dict['what'](old_obj, trans_dict)
new_obj.db_what = res
elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
new_obj.db_what = old_obj.db_what
if 'oldObjId' in class_dict:
res = class_dict['oldObjId'](old_obj, trans_dict)
new_obj.db_oldObjId = res
elif hasattr(old_obj, 'db_oldObjId') and old_obj.db_oldObjId is not None:
new_obj.db_oldObjId = old_obj.db_oldObjId
if 'newObjId' in class_dict:
res = class_dict['newObjId'](old_obj, trans_dict)
new_obj.db_newObjId = res
elif hasattr(old_obj, 'db_newObjId') and old_obj.db_newObjId is not None:
new_obj.db_newObjId = old_obj.db_newObjId
if 'parentObjId' in class_dict:
res = class_dict['parentObjId'](old_obj, trans_dict)
new_obj.db_parentObjId = res
elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
new_obj.db_parentObjId = old_obj.db_parentObjId
if 'parentObjType' in class_dict:
res = class_dict['parentObjType'](old_obj, trans_dict)
new_obj.db_parentObjType = res
elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
new_obj.db_parentObjType = old_obj.db_parentObjType
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
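    # update_version migrates an object across schema versions: trans_dict
    # maps class names to per-field translation hooks, and any field
    # without a hook is copied over unchanged.  Sketch with a hypothetical
    # hook (the lambda and its renaming rule are illustrative, not part of
    # this module):
    #
    #   trans_dict = {'DBChange':
    #                 {'what': lambda old, td: old.db_what.lower()}}
    #   new_change = DBChange.update_version(old_change, trans_dict)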
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_data is not None:
children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_data = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_data)
if remove:
self.db_deleted_data = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_data is not None and self._db_data.has_changes():
return True
return False
def __get_db_data(self):
return self._db_data
def __set_db_data(self, data):
self._db_data = data
self.is_dirty = True
db_data = property(__get_db_data, __set_db_data)
def db_add_data(self, data):
self._db_data = data
def db_change_data(self, data):
self._db_data = data
def db_delete_data(self, data):
if not self.is_new:
self.db_deleted_data.append(self._db_data)
self._db_data = None
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_what(self):
return self._db_what
def __set_db_what(self, what):
self._db_what = what
self.is_dirty = True
db_what = property(__get_db_what, __set_db_what)
def db_add_what(self, what):
self._db_what = what
def db_change_what(self, what):
self._db_what = what
def db_delete_what(self, what):
self._db_what = None
def __get_db_oldObjId(self):
return self._db_oldObjId
def __set_db_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
self.is_dirty = True
db_oldObjId = property(__get_db_oldObjId, __set_db_oldObjId)
def db_add_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
def db_change_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
def db_delete_oldObjId(self, oldObjId):
self._db_oldObjId = None
def __get_db_newObjId(self):
return self._db_newObjId
def __set_db_newObjId(self, newObjId):
self._db_newObjId = newObjId
self.is_dirty = True
db_newObjId = property(__get_db_newObjId, __set_db_newObjId)
def db_add_newObjId(self, newObjId):
self._db_newObjId = newObjId
def db_change_newObjId(self, newObjId):
self._db_newObjId = newObjId
def db_delete_newObjId(self, newObjId):
self._db_newObjId = None
def __get_db_parentObjId(self):
return self._db_parentObjId
def __set_db_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
self.is_dirty = True
db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
def db_add_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_change_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_delete_parentObjId(self, parentObjId):
self._db_parentObjId = None
def __get_db_parentObjType(self):
return self._db_parentObjType
def __set_db_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
self.is_dirty = True
db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
def db_add_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_change_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_delete_parentObjType(self, parentObjType):
self._db_parentObjType = None
def getPrimaryKey(self):
return self._db_id
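# The DBOpm* classes below follow the Open Provenance Model: a
# WasDerivedFrom edge states that the effect artifact was derived from the
# cause artifact, optionally scoped to accounts and stamped with opm_times.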
class DBOpmWasDerivedFrom(object):
vtType = 'opm_was_derived_from'
def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
self.db_deleted_effect = []
self._db_effect = effect
self.db_deleted_role = []
self._db_role = role
self.db_deleted_cause = []
self._db_cause = cause
self.db_deleted_accounts = []
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
self.db_deleted_opm_times = []
if opm_times is None:
self._db_opm_times = []
else:
self._db_opm_times = opm_times
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmWasDerivedFrom.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmWasDerivedFrom()
if self._db_effect is not None:
cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
if self._db_role is not None:
cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
if self._db_cause is not None:
cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
if self._db_opm_times is None:
cp._db_opm_times = []
else:
cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmWasDerivedFrom()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'effect' in class_dict:
res = class_dict['effect'](old_obj, trans_dict)
new_obj.db_effect = res
elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
obj = old_obj.db_effect
new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
for obj in old_obj.db_deleted_effect:
n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict)
new_obj.db_deleted_effect.append(n_obj)
if 'role' in class_dict:
res = class_dict['role'](old_obj, trans_dict)
new_obj.db_role = res
elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
obj = old_obj.db_role
new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
for obj in old_obj.db_deleted_role:
n_obj = DBOpmRole.update_version(obj, trans_dict)
new_obj.db_deleted_role.append(n_obj)
if 'cause' in class_dict:
res = class_dict['cause'](old_obj, trans_dict)
new_obj.db_cause = res
elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
obj = old_obj.db_cause
new_obj.db_add_cause(DBOpmArtifactIdCause.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
for obj in old_obj.db_deleted_cause:
n_obj = DBOpmArtifactIdCause.update_version(obj, trans_dict)
new_obj.db_deleted_cause.append(n_obj)
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
if 'opm_times' in class_dict:
res = class_dict['opm_times'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_opm_time(obj)
elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
for obj in old_obj.db_opm_times:
new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
for obj in old_obj.db_deleted_opm_times:
n_obj = DBOpmTime.update_version(obj, trans_dict)
new_obj.db_deleted_opm_times.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_effect is not None:
children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_effect = None
if self._db_role is not None:
children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_role = None
if self._db_cause is not None:
children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_cause = None
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
to_del = []
for child in self.db_opm_times:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_opm_time(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_effect)
children.extend(self.db_deleted_role)
children.extend(self.db_deleted_cause)
children.extend(self.db_deleted_accounts)
children.extend(self.db_deleted_opm_times)
if remove:
self.db_deleted_effect = []
self.db_deleted_role = []
self.db_deleted_cause = []
self.db_deleted_accounts = []
self.db_deleted_opm_times = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_effect is not None and self._db_effect.has_changes():
return True
if self._db_role is not None and self._db_role.has_changes():
return True
if self._db_cause is not None and self._db_cause.has_changes():
return True
for child in self._db_accounts:
if child.has_changes():
return True
for child in self._db_opm_times:
if child.has_changes():
return True
return False
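    # has_changes is recursive: the edge reports changes if it is dirty
    # itself or if its effect/role/cause or any account/opm_time child
    # does, so persistence code can skip subtrees that are still clean.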
def __get_db_effect(self):
return self._db_effect
def __set_db_effect(self, effect):
self._db_effect = effect
self.is_dirty = True
db_effect = property(__get_db_effect, __set_db_effect)
def db_add_effect(self, effect):
self._db_effect = effect
def db_change_effect(self, effect):
self._db_effect = effect
def db_delete_effect(self, effect):
if not self.is_new:
self.db_deleted_effect.append(self._db_effect)
self._db_effect = None
def __get_db_role(self):
return self._db_role
def __set_db_role(self, role):
self._db_role = role
self.is_dirty = True
db_role = property(__get_db_role, __set_db_role)
def db_add_role(self, role):
self._db_role = role
def db_change_role(self, role):
self._db_role = role
def db_delete_role(self, role):
if not self.is_new:
self.db_deleted_role.append(self._db_role)
self._db_role = None
def __get_db_cause(self):
return self._db_cause
def __set_db_cause(self, cause):
self._db_cause = cause
self.is_dirty = True
db_cause = property(__get_db_cause, __set_db_cause)
def db_add_cause(self, cause):
self._db_cause = cause
def db_change_cause(self, cause):
self._db_cause = cause
def db_delete_cause(self, cause):
if not self.is_new:
self.db_deleted_cause.append(self._db_cause)
self._db_cause = None
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
    def db_change_account(self, account):
        # non-keyed children cannot be matched by id, so a "change" just
        # appends the new value
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        # individual deletion of non-keyed children is unsupported; fail
        # without marking the object dirty
        raise Exception('Cannot delete a non-keyed object')
def db_get_account(self, key):
return None
def __get_db_opm_times(self):
return self._db_opm_times
def __set_db_opm_times(self, opm_times):
self._db_opm_times = opm_times
self.is_dirty = True
db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
def db_get_opm_times(self):
return self._db_opm_times
def db_add_opm_time(self, opm_time):
self.is_dirty = True
self._db_opm_times.append(opm_time)
def db_change_opm_time(self, opm_time):
self.is_dirty = True
self._db_opm_times.append(opm_time)
    def db_delete_opm_time(self, opm_time):
        # unsupported for non-keyed children; fail without dirtying
        raise Exception('Cannot delete a non-keyed object')
def db_get_opm_time(self, key):
return None
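# WasControlledBy links a process (the effect) to the agent (the cause)
# that controlled it; starts and ends hold the OPM time bounds of that
# control interval.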
class DBOpmWasControlledBy(object):
vtType = 'opm_was_controlled_by'
def __init__(self, effect=None, role=None, cause=None, accounts=None, starts=None, ends=None):
self.db_deleted_effect = []
self._db_effect = effect
self.db_deleted_role = []
self._db_role = role
self.db_deleted_cause = []
self._db_cause = cause
self.db_deleted_accounts = []
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
self.db_deleted_starts = []
if starts is None:
self._db_starts = []
else:
self._db_starts = starts
self.db_deleted_ends = []
if ends is None:
self._db_ends = []
else:
self._db_ends = ends
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmWasControlledBy.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmWasControlledBy()
if self._db_effect is not None:
cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
if self._db_role is not None:
cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
if self._db_cause is not None:
cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
if self._db_starts is None:
cp._db_starts = []
else:
cp._db_starts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_starts]
if self._db_ends is None:
cp._db_ends = []
else:
cp._db_ends = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_ends]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmWasControlledBy()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'effect' in class_dict:
res = class_dict['effect'](old_obj, trans_dict)
new_obj.db_effect = res
elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
obj = old_obj.db_effect
new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
for obj in old_obj.db_deleted_effect:
n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict)
new_obj.db_deleted_effect.append(n_obj)
if 'role' in class_dict:
res = class_dict['role'](old_obj, trans_dict)
new_obj.db_role = res
elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
obj = old_obj.db_role
new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
for obj in old_obj.db_deleted_role:
n_obj = DBOpmRole.update_version(obj, trans_dict)
new_obj.db_deleted_role.append(n_obj)
if 'cause' in class_dict:
res = class_dict['cause'](old_obj, trans_dict)
new_obj.db_cause = res
elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
obj = old_obj.db_cause
new_obj.db_add_cause(DBOpmAgentId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
for obj in old_obj.db_deleted_cause:
n_obj = DBOpmAgentId.update_version(obj, trans_dict)
new_obj.db_deleted_cause.append(n_obj)
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
if 'starts' in class_dict:
res = class_dict['starts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_start(obj)
elif hasattr(old_obj, 'db_starts') and old_obj.db_starts is not None:
for obj in old_obj.db_starts:
new_obj.db_add_start(DBOpmTime.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_starts') and hasattr(new_obj, 'db_deleted_starts'):
for obj in old_obj.db_deleted_starts:
n_obj = DBOpmTime.update_version(obj, trans_dict)
new_obj.db_deleted_starts.append(n_obj)
if 'ends' in class_dict:
res = class_dict['ends'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_end(obj)
elif hasattr(old_obj, 'db_ends') and old_obj.db_ends is not None:
for obj in old_obj.db_ends:
new_obj.db_add_end(DBOpmTime.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_ends') and hasattr(new_obj, 'db_deleted_ends'):
for obj in old_obj.db_deleted_ends:
n_obj = DBOpmTime.update_version(obj, trans_dict)
new_obj.db_deleted_ends.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_effect is not None:
children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_effect = None
if self._db_role is not None:
children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_role = None
if self._db_cause is not None:
children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_cause = None
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
to_del = []
for child in self.db_starts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_start(child)
to_del = []
for child in self.db_ends:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_end(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_effect)
children.extend(self.db_deleted_role)
children.extend(self.db_deleted_cause)
children.extend(self.db_deleted_accounts)
children.extend(self.db_deleted_starts)
children.extend(self.db_deleted_ends)
if remove:
self.db_deleted_effect = []
self.db_deleted_role = []
self.db_deleted_cause = []
self.db_deleted_accounts = []
self.db_deleted_starts = []
self.db_deleted_ends = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_effect is not None and self._db_effect.has_changes():
return True
if self._db_role is not None and self._db_role.has_changes():
return True
if self._db_cause is not None and self._db_cause.has_changes():
return True
for child in self._db_accounts:
if child.has_changes():
return True
for child in self._db_starts:
if child.has_changes():
return True
for child in self._db_ends:
if child.has_changes():
return True
return False
def __get_db_effect(self):
return self._db_effect
def __set_db_effect(self, effect):
self._db_effect = effect
self.is_dirty = True
db_effect = property(__get_db_effect, __set_db_effect)
def db_add_effect(self, effect):
self._db_effect = effect
def db_change_effect(self, effect):
self._db_effect = effect
def db_delete_effect(self, effect):
if not self.is_new:
self.db_deleted_effect.append(self._db_effect)
self._db_effect = None
def __get_db_role(self):
return self._db_role
def __set_db_role(self, role):
self._db_role = role
self.is_dirty = True
db_role = property(__get_db_role, __set_db_role)
def db_add_role(self, role):
self._db_role = role
def db_change_role(self, role):
self._db_role = role
def db_delete_role(self, role):
if not self.is_new:
self.db_deleted_role.append(self._db_role)
self._db_role = None
def __get_db_cause(self):
return self._db_cause
def __set_db_cause(self, cause):
self._db_cause = cause
self.is_dirty = True
db_cause = property(__get_db_cause, __set_db_cause)
def db_add_cause(self, cause):
self._db_cause = cause
def db_change_cause(self, cause):
self._db_cause = cause
def db_delete_cause(self, cause):
if not self.is_new:
self.db_deleted_cause.append(self._db_cause)
self._db_cause = None
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_change_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
    def db_delete_account(self, account):
        # unsupported for non-keyed children; fail without dirtying
        raise Exception('Cannot delete a non-keyed object')
def db_get_account(self, key):
return None
def __get_db_starts(self):
return self._db_starts
def __set_db_starts(self, starts):
self._db_starts = starts
self.is_dirty = True
db_starts = property(__get_db_starts, __set_db_starts)
def db_get_starts(self):
return self._db_starts
def db_add_start(self, start):
self.is_dirty = True
self._db_starts.append(start)
def db_change_start(self, start):
self.is_dirty = True
self._db_starts.append(start)
    def db_delete_start(self, start):
        # unsupported for non-keyed children; fail without dirtying
        raise Exception('Cannot delete a non-keyed object')
def db_get_start(self, key):
return None
def __get_db_ends(self):
return self._db_ends
def __set_db_ends(self, ends):
self._db_ends = ends
self.is_dirty = True
db_ends = property(__get_db_ends, __set_db_ends)
def db_get_ends(self):
return self._db_ends
def db_add_end(self, end):
self.is_dirty = True
self._db_ends.append(end)
def db_change_end(self, end):
self.is_dirty = True
self._db_ends.append(end)
    def db_delete_end(self, end):
        # unsupported for non-keyed children; fail without dirtying
        raise Exception('Cannot delete a non-keyed object')
def db_get_end(self, key):
return None
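# DBOpmAgentId is a reference wrapper: db_id names a DBOpmAgent, which is
# why do_copy below remaps it under the 'opm_agent' key rather than under
# this class's own vtType.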
class DBOpmAgentId(object):
vtType = 'opm_agent_id'
def __init__(self, id=None):
self._db_id = id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmAgentId.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmAgentId(id=self._db_id)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('opm_agent', self._db_id) in id_remap:
cp._db_id = id_remap[('opm_agent', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmAgentId()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
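# DBGroupExec logs the execution of a group.  Unlike the non-keyed OPM
# lists above, item_execs and annotations are keyed children: each list is
# shadowed by an id index so add/change/delete/get can work by db_id.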
class DBGroupExec(object):
vtType = 'group_exec'
    def __init__(self, item_execs=None, id=None, ts_start=None,
                 ts_end=None, cached=None, module_id=None, group_name=None,
                 group_type=None, completed=None, error=None,
                 machine_id=None, annotations=None):
self.db_deleted_item_execs = []
self.db_item_execs_id_index = {}
if item_execs is None:
self._db_item_execs = []
else:
self._db_item_execs = item_execs
for v in self._db_item_execs:
self.db_item_execs_id_index[v.db_id] = v
self._db_id = id
self._db_ts_start = ts_start
self._db_ts_end = ts_end
self._db_cached = cached
self._db_module_id = module_id
self._db_group_name = group_name
self._db_group_type = group_type
self._db_completed = completed
self._db_error = error
self._db_machine_id = machine_id
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBGroupExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBGroupExec(id=self._db_id,
ts_start=self._db_ts_start,
ts_end=self._db_ts_end,
cached=self._db_cached,
module_id=self._db_module_id,
group_name=self._db_group_name,
group_type=self._db_group_type,
completed=self._db_completed,
error=self._db_error,
machine_id=self._db_machine_id)
if self._db_item_execs is None:
cp._db_item_execs = []
else:
cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
cp._db_module_id = id_remap[('module', self._db_module_id)]
if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap:
cp._db_machine_id = id_remap[('machine', self._db_machine_id)]
# recreate indices and set flags
cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBGroupExec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'item_execs' in class_dict:
res = class_dict['item_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_item_exec(obj)
elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
for obj in old_obj.db_item_execs:
if obj.vtType == 'module_exec':
new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
elif obj.vtType == 'group_exec':
new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
elif obj.vtType == 'loop_exec':
new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
for obj in old_obj.db_deleted_item_execs:
if obj.vtType == 'module_exec':
n_obj = DBModuleExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
elif obj.vtType == 'group_exec':
n_obj = DBGroupExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
elif obj.vtType == 'loop_exec':
n_obj = DBLoopExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'ts_start' in class_dict:
res = class_dict['ts_start'](old_obj, trans_dict)
new_obj.db_ts_start = res
elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
new_obj.db_ts_start = old_obj.db_ts_start
if 'ts_end' in class_dict:
res = class_dict['ts_end'](old_obj, trans_dict)
new_obj.db_ts_end = res
elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
new_obj.db_ts_end = old_obj.db_ts_end
if 'cached' in class_dict:
res = class_dict['cached'](old_obj, trans_dict)
new_obj.db_cached = res
elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None:
new_obj.db_cached = old_obj.db_cached
if 'module_id' in class_dict:
res = class_dict['module_id'](old_obj, trans_dict)
new_obj.db_module_id = res
elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
new_obj.db_module_id = old_obj.db_module_id
if 'group_name' in class_dict:
res = class_dict['group_name'](old_obj, trans_dict)
new_obj.db_group_name = res
elif hasattr(old_obj, 'db_group_name') and old_obj.db_group_name is not None:
new_obj.db_group_name = old_obj.db_group_name
if 'group_type' in class_dict:
res = class_dict['group_type'](old_obj, trans_dict)
new_obj.db_group_type = res
elif hasattr(old_obj, 'db_group_type') and old_obj.db_group_type is not None:
new_obj.db_group_type = old_obj.db_group_type
if 'completed' in class_dict:
res = class_dict['completed'](old_obj, trans_dict)
new_obj.db_completed = res
elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
new_obj.db_completed = old_obj.db_completed
if 'error' in class_dict:
res = class_dict['error'](old_obj, trans_dict)
new_obj.db_error = res
elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
new_obj.db_error = old_obj.db_error
if 'machine_id' in class_dict:
res = class_dict['machine_id'](old_obj, trans_dict)
new_obj.db_machine_id = res
elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None:
new_obj.db_machine_id = old_obj.db_machine_id
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_item_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_item_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_item_execs)
if remove:
self.db_deleted_annotations = []
self.db_deleted_item_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_item_execs:
if child.has_changes():
return True
return False
def __get_db_item_execs(self):
return self._db_item_execs
def __set_db_item_execs(self, item_execs):
self._db_item_execs = item_execs
self.is_dirty = True
db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
def db_get_item_execs(self):
return self._db_item_execs
def db_add_item_exec(self, item_exec):
self.is_dirty = True
self._db_item_execs.append(item_exec)
self.db_item_execs_id_index[item_exec.db_id] = item_exec
def db_change_item_exec(self, item_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == item_exec.db_id:
self._db_item_execs[i] = item_exec
found = True
break
if not found:
self._db_item_execs.append(item_exec)
self.db_item_execs_id_index[item_exec.db_id] = item_exec
def db_delete_item_exec(self, item_exec):
self.is_dirty = True
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == item_exec.db_id:
if not self._db_item_execs[i].is_new:
self.db_deleted_item_execs.append(self._db_item_execs[i])
del self._db_item_execs[i]
break
del self.db_item_execs_id_index[item_exec.db_id]
def db_get_item_exec(self, key):
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == key:
return self._db_item_execs[i]
return None
def db_get_item_exec_by_id(self, key):
return self.db_item_execs_id_index[key]
def db_has_item_exec_with_id(self, key):
return key in self.db_item_execs_id_index
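    # The keyed-child accessors above keep the list and the id index in
    # step.  Sketch (illustrative; module_exec is assumed to be a
    # DBModuleExec whose db_id has already been assigned):
    #
    #   group_exec.db_add_item_exec(module_exec)
    #   group_exec.db_has_item_exec_with_id(module_exec.db_id)  # -> True
    #   group_exec.db_delete_item_exec(module_exec)
    #   # the removed child lands in db_deleted_item_execs unless is_new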
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_ts_start(self):
return self._db_ts_start
def __set_db_ts_start(self, ts_start):
self._db_ts_start = ts_start
self.is_dirty = True
db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
def db_add_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_change_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_delete_ts_start(self, ts_start):
self._db_ts_start = None
def __get_db_ts_end(self):
return self._db_ts_end
def __set_db_ts_end(self, ts_end):
self._db_ts_end = ts_end
self.is_dirty = True
db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
def db_add_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_change_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_delete_ts_end(self, ts_end):
self._db_ts_end = None
def __get_db_cached(self):
return self._db_cached
def __set_db_cached(self, cached):
self._db_cached = cached
self.is_dirty = True
db_cached = property(__get_db_cached, __set_db_cached)
def db_add_cached(self, cached):
self._db_cached = cached
def db_change_cached(self, cached):
self._db_cached = cached
def db_delete_cached(self, cached):
self._db_cached = None
def __get_db_module_id(self):
return self._db_module_id
def __set_db_module_id(self, module_id):
self._db_module_id = module_id
self.is_dirty = True
db_module_id = property(__get_db_module_id, __set_db_module_id)
def db_add_module_id(self, module_id):
self._db_module_id = module_id
def db_change_module_id(self, module_id):
self._db_module_id = module_id
def db_delete_module_id(self, module_id):
self._db_module_id = None
def __get_db_group_name(self):
return self._db_group_name
def __set_db_group_name(self, group_name):
self._db_group_name = group_name
self.is_dirty = True
db_group_name = property(__get_db_group_name, __set_db_group_name)
def db_add_group_name(self, group_name):
self._db_group_name = group_name
def db_change_group_name(self, group_name):
self._db_group_name = group_name
def db_delete_group_name(self, group_name):
self._db_group_name = None
def __get_db_group_type(self):
return self._db_group_type
def __set_db_group_type(self, group_type):
self._db_group_type = group_type
self.is_dirty = True
db_group_type = property(__get_db_group_type, __set_db_group_type)
def db_add_group_type(self, group_type):
self._db_group_type = group_type
def db_change_group_type(self, group_type):
self._db_group_type = group_type
def db_delete_group_type(self, group_type):
self._db_group_type = None
def __get_db_completed(self):
return self._db_completed
def __set_db_completed(self, completed):
self._db_completed = completed
self.is_dirty = True
db_completed = property(__get_db_completed, __set_db_completed)
def db_add_completed(self, completed):
self._db_completed = completed
def db_change_completed(self, completed):
self._db_completed = completed
def db_delete_completed(self, completed):
self._db_completed = None
def __get_db_error(self):
return self._db_error
def __set_db_error(self, error):
self._db_error = error
self.is_dirty = True
db_error = property(__get_db_error, __set_db_error)
def db_add_error(self, error):
self._db_error = error
def db_change_error(self, error):
self._db_error = error
def db_delete_error(self, error):
self._db_error = None
def __get_db_machine_id(self):
return self._db_machine_id
def __set_db_machine_id(self, machine_id):
self._db_machine_id = machine_id
self.is_dirty = True
db_machine_id = property(__get_db_machine_id, __set_db_machine_id)
def db_add_machine_id(self, machine_id):
self._db_machine_id = machine_id
def db_change_machine_id(self, machine_id):
self._db_machine_id = machine_id
def db_delete_machine_id(self, machine_id):
self._db_machine_id = None
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def getPrimaryKey(self):
return self._db_id
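# DBOpmTime is an OPM interval timestamp: no_earlier_than and
# no_later_than bound when the event happened, as measured by the clock
# named in clock_id.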
class DBOpmTime(object):
vtType = 'opm_time'
def __init__(self, no_later_than=None, no_earlier_than=None, clock_id=None):
self._db_no_later_than = no_later_than
self._db_no_earlier_than = no_earlier_than
self._db_clock_id = clock_id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmTime.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmTime(no_later_than=self._db_no_later_than,
no_earlier_than=self._db_no_earlier_than,
clock_id=self._db_clock_id)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmTime()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'no_later_than' in class_dict:
res = class_dict['no_later_than'](old_obj, trans_dict)
new_obj.db_no_later_than = res
elif hasattr(old_obj, 'db_no_later_than') and old_obj.db_no_later_than is not None:
new_obj.db_no_later_than = old_obj.db_no_later_than
if 'no_earlier_than' in class_dict:
res = class_dict['no_earlier_than'](old_obj, trans_dict)
new_obj.db_no_earlier_than = res
elif hasattr(old_obj, 'db_no_earlier_than') and old_obj.db_no_earlier_than is not None:
new_obj.db_no_earlier_than = old_obj.db_no_earlier_than
if 'clock_id' in class_dict:
res = class_dict['clock_id'](old_obj, trans_dict)
new_obj.db_clock_id = res
elif hasattr(old_obj, 'db_clock_id') and old_obj.db_clock_id is not None:
new_obj.db_clock_id = old_obj.db_clock_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_no_later_than(self):
return self._db_no_later_than
def __set_db_no_later_than(self, no_later_than):
self._db_no_later_than = no_later_than
self.is_dirty = True
db_no_later_than = property(__get_db_no_later_than, __set_db_no_later_than)
def db_add_no_later_than(self, no_later_than):
self._db_no_later_than = no_later_than
def db_change_no_later_than(self, no_later_than):
self._db_no_later_than = no_later_than
def db_delete_no_later_than(self, no_later_than):
self._db_no_later_than = None
def __get_db_no_earlier_than(self):
return self._db_no_earlier_than
def __set_db_no_earlier_than(self, no_earlier_than):
self._db_no_earlier_than = no_earlier_than
self.is_dirty = True
db_no_earlier_than = property(__get_db_no_earlier_than, __set_db_no_earlier_than)
def db_add_no_earlier_than(self, no_earlier_than):
self._db_no_earlier_than = no_earlier_than
def db_change_no_earlier_than(self, no_earlier_than):
self._db_no_earlier_than = no_earlier_than
def db_delete_no_earlier_than(self, no_earlier_than):
self._db_no_earlier_than = None
def __get_db_clock_id(self):
return self._db_clock_id
def __set_db_clock_id(self, clock_id):
self._db_clock_id = clock_id
self.is_dirty = True
db_clock_id = property(__get_db_clock_id, __set_db_clock_id)
def db_add_clock_id(self, clock_id):
self._db_clock_id = clock_id
def db_change_clock_id(self, clock_id):
self._db_clock_id = clock_id
def db_delete_clock_id(self, clock_id):
self._db_clock_id = None
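# DBPackage describes a package and its module descriptors.  Descriptors
# are double-indexed, by db_id and by the (name, namespace, version)
# triple, so both lookup styles below are dictionary lookups.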
class DBPackage(object):
vtType = 'package'
    def __init__(self, id=None, name=None, identifier=None, codepath=None,
                 load_configuration=None, version=None, description=None,
                 module_descriptors=None):
self._db_id = id
self._db_name = name
self._db_identifier = identifier
self._db_codepath = codepath
self._db_load_configuration = load_configuration
self._db_version = version
self._db_description = description
self.db_deleted_module_descriptors = []
self.db_module_descriptors_id_index = {}
self.db_module_descriptors_name_index = {}
if module_descriptors is None:
self._db_module_descriptors = []
else:
self._db_module_descriptors = module_descriptors
for v in self._db_module_descriptors:
self.db_module_descriptors_id_index[v.db_id] = v
self.db_module_descriptors_name_index[(v.db_name,v.db_namespace,v.db_version)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPackage.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPackage(id=self._db_id,
name=self._db_name,
identifier=self._db_identifier,
codepath=self._db_codepath,
load_configuration=self._db_load_configuration,
version=self._db_version,
description=self._db_description)
if self._db_module_descriptors is None:
cp._db_module_descriptors = []
else:
cp._db_module_descriptors = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_module_descriptors]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_module_descriptors_id_index = dict((v.db_id, v) for v in cp._db_module_descriptors)
cp.db_module_descriptors_name_index = dict(((v.db_name,v.db_namespace,v.db_version), v) for v in cp._db_module_descriptors)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPackage()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'identifier' in class_dict:
res = class_dict['identifier'](old_obj, trans_dict)
new_obj.db_identifier = res
elif hasattr(old_obj, 'db_identifier') and old_obj.db_identifier is not None:
new_obj.db_identifier = old_obj.db_identifier
if 'codepath' in class_dict:
res = class_dict['codepath'](old_obj, trans_dict)
new_obj.db_codepath = res
elif hasattr(old_obj, 'db_codepath') and old_obj.db_codepath is not None:
new_obj.db_codepath = old_obj.db_codepath
if 'load_configuration' in class_dict:
res = class_dict['load_configuration'](old_obj, trans_dict)
new_obj.db_load_configuration = res
elif hasattr(old_obj, 'db_load_configuration') and old_obj.db_load_configuration is not None:
new_obj.db_load_configuration = old_obj.db_load_configuration
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'description' in class_dict:
res = class_dict['description'](old_obj, trans_dict)
new_obj.db_description = res
elif hasattr(old_obj, 'db_description') and old_obj.db_description is not None:
new_obj.db_description = old_obj.db_description
if 'module_descriptors' in class_dict:
res = class_dict['module_descriptors'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_module_descriptor(obj)
elif hasattr(old_obj, 'db_module_descriptors') and old_obj.db_module_descriptors is not None:
for obj in old_obj.db_module_descriptors:
new_obj.db_add_module_descriptor(DBModuleDescriptor.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_module_descriptors') and hasattr(new_obj, 'db_deleted_module_descriptors'):
for obj in old_obj.db_deleted_module_descriptors:
n_obj = DBModuleDescriptor.update_version(obj, trans_dict)
new_obj.db_deleted_module_descriptors.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_module_descriptors:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_module_descriptor(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_module_descriptors)
if remove:
self.db_deleted_module_descriptors = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_module_descriptors:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_identifier(self):
return self._db_identifier
def __set_db_identifier(self, identifier):
self._db_identifier = identifier
self.is_dirty = True
db_identifier = property(__get_db_identifier, __set_db_identifier)
def db_add_identifier(self, identifier):
self._db_identifier = identifier
def db_change_identifier(self, identifier):
self._db_identifier = identifier
def db_delete_identifier(self, identifier):
self._db_identifier = None
def __get_db_codepath(self):
return self._db_codepath
def __set_db_codepath(self, codepath):
self._db_codepath = codepath
self.is_dirty = True
db_codepath = property(__get_db_codepath, __set_db_codepath)
def db_add_codepath(self, codepath):
self._db_codepath = codepath
def db_change_codepath(self, codepath):
self._db_codepath = codepath
def db_delete_codepath(self, codepath):
self._db_codepath = None
def __get_db_load_configuration(self):
return self._db_load_configuration
def __set_db_load_configuration(self, load_configuration):
self._db_load_configuration = load_configuration
self.is_dirty = True
db_load_configuration = property(__get_db_load_configuration, __set_db_load_configuration)
def db_add_load_configuration(self, load_configuration):
self._db_load_configuration = load_configuration
def db_change_load_configuration(self, load_configuration):
self._db_load_configuration = load_configuration
def db_delete_load_configuration(self, load_configuration):
self._db_load_configuration = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_description(self):
return self._db_description
def __set_db_description(self, description):
self._db_description = description
self.is_dirty = True
db_description = property(__get_db_description, __set_db_description)
def db_add_description(self, description):
self._db_description = description
def db_change_description(self, description):
self._db_description = description
def db_delete_description(self, description):
self._db_description = None
def __get_db_module_descriptors(self):
return self._db_module_descriptors
def __set_db_module_descriptors(self, module_descriptors):
self._db_module_descriptors = module_descriptors
self.is_dirty = True
db_module_descriptors = property(__get_db_module_descriptors, __set_db_module_descriptors)
def db_get_module_descriptors(self):
return self._db_module_descriptors
def db_add_module_descriptor(self, module_descriptor):
self.is_dirty = True
self._db_module_descriptors.append(module_descriptor)
self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor
self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor
def db_change_module_descriptor(self, module_descriptor):
self.is_dirty = True
found = False
for i in xrange(len(self._db_module_descriptors)):
if self._db_module_descriptors[i].db_id == module_descriptor.db_id:
self._db_module_descriptors[i] = module_descriptor
found = True
break
if not found:
self._db_module_descriptors.append(module_descriptor)
self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor
self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor
def db_delete_module_descriptor(self, module_descriptor):
self.is_dirty = True
for i in xrange(len(self._db_module_descriptors)):
if self._db_module_descriptors[i].db_id == module_descriptor.db_id:
if not self._db_module_descriptors[i].is_new:
self.db_deleted_module_descriptors.append(self._db_module_descriptors[i])
del self._db_module_descriptors[i]
break
del self.db_module_descriptors_id_index[module_descriptor.db_id]
del self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)]
def db_get_module_descriptor(self, key):
for i in xrange(len(self._db_module_descriptors)):
if self._db_module_descriptors[i].db_id == key:
return self._db_module_descriptors[i]
return None
def db_get_module_descriptor_by_id(self, key):
return self.db_module_descriptors_id_index[key]
def db_has_module_descriptor_with_id(self, key):
return key in self.db_module_descriptors_id_index
def db_get_module_descriptor_by_name(self, key):
return self.db_module_descriptors_name_index[key]
def db_has_module_descriptor_with_name(self, key):
return key in self.db_module_descriptors_name_index
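    # Sketch of a name-index lookup (illustrative; the key is the same
    # (name, namespace, version) triple the index is built with):
    #
    #   key = ('MyModule', None, '1.0')
    #   if package.db_has_module_descriptor_with_name(key):
    #       desc = package.db_get_module_descriptor_by_name(key)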
def getPrimaryKey(self):
return self._db_id
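# DBWorkflowExec is the top-level execution record: it captures who ran
# which workflow (user/ip/session, parent_* and vt_version), when
# (ts_start/ts_end), and nests the per-item executions as keyed children.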
class DBWorkflowExec(object):
vtType = 'workflow_exec'
    def __init__(self, item_execs=None, id=None, user=None, ip=None,
                 session=None, vt_version=None, ts_start=None, ts_end=None,
                 parent_id=None, parent_type=None, parent_version=None,
                 completed=None, name=None):
self.db_deleted_item_execs = []
self.db_item_execs_id_index = {}
if item_execs is None:
self._db_item_execs = []
else:
self._db_item_execs = item_execs
for v in self._db_item_execs:
self.db_item_execs_id_index[v.db_id] = v
self._db_id = id
self._db_user = user
self._db_ip = ip
self._db_session = session
self._db_vt_version = vt_version
self._db_ts_start = ts_start
self._db_ts_end = ts_end
self._db_parent_id = parent_id
self._db_parent_type = parent_type
self._db_parent_version = parent_version
self._db_completed = completed
self._db_name = name
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBWorkflowExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBWorkflowExec(id=self._db_id,
user=self._db_user,
ip=self._db_ip,
session=self._db_session,
vt_version=self._db_vt_version,
ts_start=self._db_ts_start,
ts_end=self._db_ts_end,
parent_id=self._db_parent_id,
parent_type=self._db_parent_type,
parent_version=self._db_parent_version,
completed=self._db_completed,
name=self._db_name)
if self._db_item_execs is None:
cp._db_item_execs = []
else:
cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
# set new ids
        if new_ids:
            # allocate a fresh id and record the (type, old_id) -> new_id
            # mapping so cross-references held elsewhere can be rewritten
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
# recreate indices and set flags
cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # translate old_obj into the current schema; trans_dict maps class
        # names to dicts of per-field conversion callables, and fields with
        # no entry are copied across verbatim
        if new_obj is None:
            new_obj = DBWorkflowExec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'item_execs' in class_dict:
res = class_dict['item_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_item_exec(obj)
elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
for obj in old_obj.db_item_execs:
if obj.vtType == 'module_exec':
new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
elif obj.vtType == 'group_exec':
new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
elif obj.vtType == 'loop_exec':
new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
for obj in old_obj.db_deleted_item_execs:
if obj.vtType == 'module_exec':
n_obj = DBModuleExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
elif obj.vtType == 'group_exec':
n_obj = DBGroupExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
elif obj.vtType == 'loop_exec':
n_obj = DBLoopExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'user' in class_dict:
res = class_dict['user'](old_obj, trans_dict)
new_obj.db_user = res
elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
new_obj.db_user = old_obj.db_user
if 'ip' in class_dict:
res = class_dict['ip'](old_obj, trans_dict)
new_obj.db_ip = res
elif hasattr(old_obj, 'db_ip') and old_obj.db_ip is not None:
new_obj.db_ip = old_obj.db_ip
if 'session' in class_dict:
res = class_dict['session'](old_obj, trans_dict)
new_obj.db_session = res
elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None:
new_obj.db_session = old_obj.db_session
if 'vt_version' in class_dict:
res = class_dict['vt_version'](old_obj, trans_dict)
new_obj.db_vt_version = res
elif hasattr(old_obj, 'db_vt_version') and old_obj.db_vt_version is not None:
new_obj.db_vt_version = old_obj.db_vt_version
if 'ts_start' in class_dict:
res = class_dict['ts_start'](old_obj, trans_dict)
new_obj.db_ts_start = res
elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
new_obj.db_ts_start = old_obj.db_ts_start
if 'ts_end' in class_dict:
res = class_dict['ts_end'](old_obj, trans_dict)
new_obj.db_ts_end = res
elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
new_obj.db_ts_end = old_obj.db_ts_end
if 'parent_id' in class_dict:
res = class_dict['parent_id'](old_obj, trans_dict)
new_obj.db_parent_id = res
elif hasattr(old_obj, 'db_parent_id') and old_obj.db_parent_id is not None:
new_obj.db_parent_id = old_obj.db_parent_id
if 'parent_type' in class_dict:
res = class_dict['parent_type'](old_obj, trans_dict)
new_obj.db_parent_type = res
elif hasattr(old_obj, 'db_parent_type') and old_obj.db_parent_type is not None:
new_obj.db_parent_type = old_obj.db_parent_type
if 'parent_version' in class_dict:
res = class_dict['parent_version'](old_obj, trans_dict)
new_obj.db_parent_version = res
elif hasattr(old_obj, 'db_parent_version') and old_obj.db_parent_version is not None:
new_obj.db_parent_version = old_obj.db_parent_version
if 'completed' in class_dict:
res = class_dict['completed'](old_obj, trans_dict)
new_obj.db_completed = res
elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
new_obj.db_completed = old_obj.db_completed
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
    def db_children(self, parent=(None,None), orphan=False):
        # collect (object, parent_type, parent_id) triples for this node and
        # all descendants; with orphan=True the children are detached as they
        # are gathered
        children = []
        to_del = []
for child in self.db_item_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_item_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_item_execs)
if remove:
self.db_deleted_item_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_item_execs:
if child.has_changes():
return True
return False
def __get_db_item_execs(self):
return self._db_item_execs
def __set_db_item_execs(self, item_execs):
self._db_item_execs = item_execs
self.is_dirty = True
db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
def db_get_item_execs(self):
return self._db_item_execs
def db_add_item_exec(self, item_exec):
self.is_dirty = True
self._db_item_execs.append(item_exec)
self.db_item_execs_id_index[item_exec.db_id] = item_exec
def db_change_item_exec(self, item_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == item_exec.db_id:
self._db_item_execs[i] = item_exec
found = True
break
if not found:
self._db_item_execs.append(item_exec)
self.db_item_execs_id_index[item_exec.db_id] = item_exec
def db_delete_item_exec(self, item_exec):
self.is_dirty = True
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == item_exec.db_id:
if not self._db_item_execs[i].is_new:
self.db_deleted_item_execs.append(self._db_item_execs[i])
del self._db_item_execs[i]
break
del self.db_item_execs_id_index[item_exec.db_id]
def db_get_item_exec(self, key):
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == key:
return self._db_item_execs[i]
return None
def db_get_item_exec_by_id(self, key):
return self.db_item_execs_id_index[key]
def db_has_item_exec_with_id(self, key):
return key in self.db_item_execs_id_index
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_user(self):
return self._db_user
def __set_db_user(self, user):
self._db_user = user
self.is_dirty = True
db_user = property(__get_db_user, __set_db_user)
def db_add_user(self, user):
self._db_user = user
def db_change_user(self, user):
self._db_user = user
def db_delete_user(self, user):
self._db_user = None
def __get_db_ip(self):
return self._db_ip
def __set_db_ip(self, ip):
self._db_ip = ip
self.is_dirty = True
db_ip = property(__get_db_ip, __set_db_ip)
def db_add_ip(self, ip):
self._db_ip = ip
def db_change_ip(self, ip):
self._db_ip = ip
def db_delete_ip(self, ip):
self._db_ip = None
def __get_db_session(self):
return self._db_session
def __set_db_session(self, session):
self._db_session = session
self.is_dirty = True
db_session = property(__get_db_session, __set_db_session)
def db_add_session(self, session):
self._db_session = session
def db_change_session(self, session):
self._db_session = session
def db_delete_session(self, session):
self._db_session = None
def __get_db_vt_version(self):
return self._db_vt_version
def __set_db_vt_version(self, vt_version):
self._db_vt_version = vt_version
self.is_dirty = True
db_vt_version = property(__get_db_vt_version, __set_db_vt_version)
def db_add_vt_version(self, vt_version):
self._db_vt_version = vt_version
def db_change_vt_version(self, vt_version):
self._db_vt_version = vt_version
def db_delete_vt_version(self, vt_version):
self._db_vt_version = None
def __get_db_ts_start(self):
return self._db_ts_start
def __set_db_ts_start(self, ts_start):
self._db_ts_start = ts_start
self.is_dirty = True
db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
def db_add_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_change_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_delete_ts_start(self, ts_start):
self._db_ts_start = None
def __get_db_ts_end(self):
return self._db_ts_end
def __set_db_ts_end(self, ts_end):
self._db_ts_end = ts_end
self.is_dirty = True
db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
def db_add_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_change_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_delete_ts_end(self, ts_end):
self._db_ts_end = None
def __get_db_parent_id(self):
return self._db_parent_id
def __set_db_parent_id(self, parent_id):
self._db_parent_id = parent_id
self.is_dirty = True
db_parent_id = property(__get_db_parent_id, __set_db_parent_id)
def db_add_parent_id(self, parent_id):
self._db_parent_id = parent_id
def db_change_parent_id(self, parent_id):
self._db_parent_id = parent_id
def db_delete_parent_id(self, parent_id):
self._db_parent_id = None
def __get_db_parent_type(self):
return self._db_parent_type
def __set_db_parent_type(self, parent_type):
self._db_parent_type = parent_type
self.is_dirty = True
db_parent_type = property(__get_db_parent_type, __set_db_parent_type)
def db_add_parent_type(self, parent_type):
self._db_parent_type = parent_type
def db_change_parent_type(self, parent_type):
self._db_parent_type = parent_type
def db_delete_parent_type(self, parent_type):
self._db_parent_type = None
def __get_db_parent_version(self):
return self._db_parent_version
def __set_db_parent_version(self, parent_version):
self._db_parent_version = parent_version
self.is_dirty = True
db_parent_version = property(__get_db_parent_version, __set_db_parent_version)
def db_add_parent_version(self, parent_version):
self._db_parent_version = parent_version
def db_change_parent_version(self, parent_version):
self._db_parent_version = parent_version
def db_delete_parent_version(self, parent_version):
self._db_parent_version = None
def __get_db_completed(self):
return self._db_completed
def __set_db_completed(self, completed):
self._db_completed = completed
self.is_dirty = True
db_completed = property(__get_db_completed, __set_db_completed)
def db_add_completed(self, completed):
self._db_completed = completed
def db_change_completed(self, completed):
self._db_completed = completed
def db_delete_completed(self, completed):
self._db_completed = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def getPrimaryKey(self):
return self._db_id
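
# Illustrative sketch (not part of the generated code): assembling a
# DBWorkflowExec record with one nested loop execution.  All field values
# below are hypothetical.
def _example_workflow_exec():
    wf = DBWorkflowExec(id=1, user='alice', session=2,
                        ts_start='2008-01-01 12:00:00',
                        ts_end='2008-01-01 12:00:05',
                        completed=1, name='first run')
    wf.db_add_item_exec(DBLoopExec(id=10, iteration=0, completed=1))
    return wf
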
class DBLoopExec(object):
vtType = 'loop_exec'
def __init__(self, item_execs=None, id=None, ts_start=None, ts_end=None, iteration=None, completed=None, error=None):
self.db_deleted_item_execs = []
self.db_item_execs_id_index = {}
if item_execs is None:
self._db_item_execs = []
else:
self._db_item_execs = item_execs
for v in self._db_item_execs:
self.db_item_execs_id_index[v.db_id] = v
self._db_id = id
self._db_ts_start = ts_start
self._db_ts_end = ts_end
self._db_iteration = iteration
self._db_completed = completed
self._db_error = error
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBLoopExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBLoopExec(id=self._db_id,
ts_start=self._db_ts_start,
ts_end=self._db_ts_end,
iteration=self._db_iteration,
completed=self._db_completed,
error=self._db_error)
if self._db_item_execs is None:
cp._db_item_execs = []
else:
cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBLoopExec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'item_execs' in class_dict:
res = class_dict['item_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_item_exec(obj)
elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
for obj in old_obj.db_item_execs:
if obj.vtType == 'module_exec':
new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
elif obj.vtType == 'group_exec':
new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
elif obj.vtType == 'loop_exec':
new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
for obj in old_obj.db_deleted_item_execs:
if obj.vtType == 'module_exec':
n_obj = DBModuleExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
elif obj.vtType == 'group_exec':
n_obj = DBGroupExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
elif obj.vtType == 'loop_exec':
n_obj = DBLoopExec.update_version(obj, trans_dict)
new_obj.db_deleted_item_execs.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'ts_start' in class_dict:
res = class_dict['ts_start'](old_obj, trans_dict)
new_obj.db_ts_start = res
elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
new_obj.db_ts_start = old_obj.db_ts_start
if 'ts_end' in class_dict:
res = class_dict['ts_end'](old_obj, trans_dict)
new_obj.db_ts_end = res
elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
new_obj.db_ts_end = old_obj.db_ts_end
if 'iteration' in class_dict:
res = class_dict['iteration'](old_obj, trans_dict)
new_obj.db_iteration = res
elif hasattr(old_obj, 'db_iteration') and old_obj.db_iteration is not None:
new_obj.db_iteration = old_obj.db_iteration
if 'completed' in class_dict:
res = class_dict['completed'](old_obj, trans_dict)
new_obj.db_completed = res
elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
new_obj.db_completed = old_obj.db_completed
if 'error' in class_dict:
res = class_dict['error'](old_obj, trans_dict)
new_obj.db_error = res
elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
new_obj.db_error = old_obj.db_error
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_item_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_item_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_item_execs)
if remove:
self.db_deleted_item_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_item_execs:
if child.has_changes():
return True
return False
def __get_db_item_execs(self):
return self._db_item_execs
def __set_db_item_execs(self, item_execs):
self._db_item_execs = item_execs
self.is_dirty = True
db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
def db_get_item_execs(self):
return self._db_item_execs
def db_add_item_exec(self, item_exec):
self.is_dirty = True
self._db_item_execs.append(item_exec)
self.db_item_execs_id_index[item_exec.db_id] = item_exec
def db_change_item_exec(self, item_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == item_exec.db_id:
self._db_item_execs[i] = item_exec
found = True
break
if not found:
self._db_item_execs.append(item_exec)
self.db_item_execs_id_index[item_exec.db_id] = item_exec
def db_delete_item_exec(self, item_exec):
self.is_dirty = True
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == item_exec.db_id:
if not self._db_item_execs[i].is_new:
self.db_deleted_item_execs.append(self._db_item_execs[i])
del self._db_item_execs[i]
break
del self.db_item_execs_id_index[item_exec.db_id]
def db_get_item_exec(self, key):
for i in xrange(len(self._db_item_execs)):
if self._db_item_execs[i].db_id == key:
return self._db_item_execs[i]
return None
def db_get_item_exec_by_id(self, key):
return self.db_item_execs_id_index[key]
def db_has_item_exec_with_id(self, key):
return key in self.db_item_execs_id_index
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_ts_start(self):
return self._db_ts_start
def __set_db_ts_start(self, ts_start):
self._db_ts_start = ts_start
self.is_dirty = True
db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
def db_add_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_change_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_delete_ts_start(self, ts_start):
self._db_ts_start = None
def __get_db_ts_end(self):
return self._db_ts_end
def __set_db_ts_end(self, ts_end):
self._db_ts_end = ts_end
self.is_dirty = True
db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
def db_add_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_change_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_delete_ts_end(self, ts_end):
self._db_ts_end = None
def __get_db_iteration(self):
return self._db_iteration
def __set_db_iteration(self, iteration):
self._db_iteration = iteration
self.is_dirty = True
db_iteration = property(__get_db_iteration, __set_db_iteration)
def db_add_iteration(self, iteration):
self._db_iteration = iteration
def db_change_iteration(self, iteration):
self._db_iteration = iteration
def db_delete_iteration(self, iteration):
self._db_iteration = None
def __get_db_completed(self):
return self._db_completed
def __set_db_completed(self, completed):
self._db_completed = completed
self.is_dirty = True
db_completed = property(__get_db_completed, __set_db_completed)
def db_add_completed(self, completed):
self._db_completed = completed
def db_change_completed(self, completed):
self._db_completed = completed
def db_delete_completed(self, completed):
self._db_completed = None
def __get_db_error(self):
return self._db_error
def __set_db_error(self, error):
self._db_error = error
self.is_dirty = True
db_error = property(__get_db_error, __set_db_error)
def db_add_error(self, error):
self._db_error = error
def db_change_error(self, error):
self._db_error = error
def db_delete_error(self, error):
self._db_error = None
def getPrimaryKey(self):
return self._db_id
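
# Illustrative sketch (not part of the generated code): copying an execution
# tree with fresh ids.  do_copy assumes an id_scope that provides
# getNewId(type) and a 'remap' dict; the class below is a minimal stand-in.
class _ExampleIdScope(object):
    def __init__(self):
        self._next = 100
        self.remap = {}
    def getNewId(self, vt_type):
        self._next += 1
        return self._next

def _example_copy_with_new_ids(loop_exec):
    id_remap = {}
    cp = loop_exec.do_copy(new_ids=True, id_scope=_ExampleIdScope(),
                           id_remap=id_remap)
    # id_remap now maps ('loop_exec', old_id) -> new_id for the whole subtree
    return cp, id_remap
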
class DBConnection(object):
vtType = 'connection'
def __init__(self, id=None, ports=None):
self._db_id = id
self.db_deleted_ports = []
self.db_ports_id_index = {}
self.db_ports_type_index = {}
if ports is None:
self._db_ports = []
else:
self._db_ports = ports
for v in self._db_ports:
self.db_ports_id_index[v.db_id] = v
self.db_ports_type_index[v.db_type] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBConnection.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBConnection(id=self._db_id)
if self._db_ports is None:
cp._db_ports = []
else:
cp._db_ports = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_ports]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_ports_id_index = dict((v.db_id, v) for v in cp._db_ports)
cp.db_ports_type_index = dict((v.db_type, v) for v in cp._db_ports)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBConnection()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'ports' in class_dict:
res = class_dict['ports'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_port(obj)
elif hasattr(old_obj, 'db_ports') and old_obj.db_ports is not None:
for obj in old_obj.db_ports:
new_obj.db_add_port(DBPort.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_ports') and hasattr(new_obj, 'db_deleted_ports'):
for obj in old_obj.db_deleted_ports:
n_obj = DBPort.update_version(obj, trans_dict)
new_obj.db_deleted_ports.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_ports:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_port(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_ports)
if remove:
self.db_deleted_ports = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_ports:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_ports(self):
return self._db_ports
def __set_db_ports(self, ports):
self._db_ports = ports
self.is_dirty = True
db_ports = property(__get_db_ports, __set_db_ports)
def db_get_ports(self):
return self._db_ports
def db_add_port(self, port):
self.is_dirty = True
self._db_ports.append(port)
self.db_ports_id_index[port.db_id] = port
self.db_ports_type_index[port.db_type] = port
def db_change_port(self, port):
self.is_dirty = True
found = False
for i in xrange(len(self._db_ports)):
if self._db_ports[i].db_id == port.db_id:
self._db_ports[i] = port
found = True
break
if not found:
self._db_ports.append(port)
self.db_ports_id_index[port.db_id] = port
self.db_ports_type_index[port.db_type] = port
def db_delete_port(self, port):
self.is_dirty = True
for i in xrange(len(self._db_ports)):
if self._db_ports[i].db_id == port.db_id:
if not self._db_ports[i].is_new:
self.db_deleted_ports.append(self._db_ports[i])
del self._db_ports[i]
break
del self.db_ports_id_index[port.db_id]
del self.db_ports_type_index[port.db_type]
def db_get_port(self, key):
for i in xrange(len(self._db_ports)):
if self._db_ports[i].db_id == key:
return self._db_ports[i]
return None
def db_get_port_by_id(self, key):
return self.db_ports_id_index[key]
def db_has_port_with_id(self, key):
return key in self.db_ports_id_index
def db_get_port_by_type(self, key):
return self.db_ports_type_index[key]
def db_has_port_with_type(self, key):
return key in self.db_ports_type_index
def getPrimaryKey(self):
return self._db_id
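
# Illustrative sketch (not part of the generated code): DBConnection indexes
# its ports by db_type, so endpoint lookup is constant-time.  The 'source' and
# 'destination' type strings are assumptions about the port naming; 'conn' is
# assumed to already hold its DBPort children.
def _example_connection_endpoints(conn):
    src = None
    dst = None
    if conn.db_has_port_with_type('source'):
        src = conn.db_get_port_by_type('source')
    if conn.db_has_port_with_type('destination'):
        dst = conn.db_get_port_by_type('destination')
    return src, dst
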
class DBOpmProcess(object):
vtType = 'opm_process'
def __init__(self, id=None, value=None, accounts=None):
self._db_id = id
self.db_deleted_value = []
self._db_value = value
self.db_deleted_accounts = []
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmProcess.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmProcess(id=self._db_id)
if self._db_value is not None:
cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmProcess()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
obj = old_obj.db_value
new_obj.db_add_value(DBOpmProcessValue.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
for obj in old_obj.db_deleted_value:
n_obj = DBOpmProcessValue.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_value is not None:
children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_value = None
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_value)
children.extend(self.db_deleted_accounts)
if remove:
self.db_deleted_value = []
self.db_deleted_accounts = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_value is not None and self._db_value.has_changes():
return True
for child in self._db_accounts:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
if not self.is_new:
self.db_deleted_value.append(self._db_value)
self._db_value = None
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
    def db_change_account(self, account):
        # accounts are non-keyed, so there is no id to match for an in-place
        # replacement; the generated code simply appends
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
def db_get_account(self, key):
return None
def getPrimaryKey(self):
return self._db_id
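
# Illustrative sketch (not part of the generated code): an OPM process wraps
# one of the execution records defined above.  Field values are hypothetical.
def _example_opm_process():
    value = DBOpmProcessValue(value=DBLoopExec(id=3, iteration=1, completed=1))
    return DBOpmProcess(id='p1', value=value)
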
class DBOpmWasTriggeredBy(object):
vtType = 'opm_was_triggered_by'
def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
self.db_deleted_effect = []
self._db_effect = effect
self.db_deleted_role = []
self._db_role = role
self.db_deleted_cause = []
self._db_cause = cause
self.db_deleted_accounts = []
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
self.db_deleted_opm_times = []
if opm_times is None:
self._db_opm_times = []
else:
self._db_opm_times = opm_times
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmWasTriggeredBy.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmWasTriggeredBy()
if self._db_effect is not None:
cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
if self._db_role is not None:
cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
if self._db_cause is not None:
cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
if self._db_opm_times is None:
cp._db_opm_times = []
else:
cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmWasTriggeredBy()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'effect' in class_dict:
res = class_dict['effect'](old_obj, trans_dict)
new_obj.db_effect = res
elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
obj = old_obj.db_effect
new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
for obj in old_obj.db_deleted_effect:
n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict)
new_obj.db_deleted_effect.append(n_obj)
if 'role' in class_dict:
res = class_dict['role'](old_obj, trans_dict)
new_obj.db_role = res
elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
obj = old_obj.db_role
new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
for obj in old_obj.db_deleted_role:
n_obj = DBOpmRole.update_version(obj, trans_dict)
new_obj.db_deleted_role.append(n_obj)
if 'cause' in class_dict:
res = class_dict['cause'](old_obj, trans_dict)
new_obj.db_cause = res
elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
obj = old_obj.db_cause
new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
for obj in old_obj.db_deleted_cause:
n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict)
new_obj.db_deleted_cause.append(n_obj)
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
if 'opm_times' in class_dict:
res = class_dict['opm_times'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_opm_time(obj)
elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
for obj in old_obj.db_opm_times:
new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
for obj in old_obj.db_deleted_opm_times:
n_obj = DBOpmTime.update_version(obj, trans_dict)
new_obj.db_deleted_opm_times.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_effect is not None:
children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_effect = None
if self._db_role is not None:
children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_role = None
if self._db_cause is not None:
children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_cause = None
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
to_del = []
for child in self.db_opm_times:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_opm_time(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_effect)
children.extend(self.db_deleted_role)
children.extend(self.db_deleted_cause)
children.extend(self.db_deleted_accounts)
children.extend(self.db_deleted_opm_times)
if remove:
self.db_deleted_effect = []
self.db_deleted_role = []
self.db_deleted_cause = []
self.db_deleted_accounts = []
self.db_deleted_opm_times = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_effect is not None and self._db_effect.has_changes():
return True
if self._db_role is not None and self._db_role.has_changes():
return True
if self._db_cause is not None and self._db_cause.has_changes():
return True
for child in self._db_accounts:
if child.has_changes():
return True
for child in self._db_opm_times:
if child.has_changes():
return True
return False
def __get_db_effect(self):
return self._db_effect
def __set_db_effect(self, effect):
self._db_effect = effect
self.is_dirty = True
db_effect = property(__get_db_effect, __set_db_effect)
def db_add_effect(self, effect):
self._db_effect = effect
def db_change_effect(self, effect):
self._db_effect = effect
def db_delete_effect(self, effect):
if not self.is_new:
self.db_deleted_effect.append(self._db_effect)
self._db_effect = None
def __get_db_role(self):
return self._db_role
def __set_db_role(self, role):
self._db_role = role
self.is_dirty = True
db_role = property(__get_db_role, __set_db_role)
def db_add_role(self, role):
self._db_role = role
def db_change_role(self, role):
self._db_role = role
def db_delete_role(self, role):
if not self.is_new:
self.db_deleted_role.append(self._db_role)
self._db_role = None
def __get_db_cause(self):
return self._db_cause
def __set_db_cause(self, cause):
self._db_cause = cause
self.is_dirty = True
db_cause = property(__get_db_cause, __set_db_cause)
def db_add_cause(self, cause):
self._db_cause = cause
def db_change_cause(self, cause):
self._db_cause = cause
def db_delete_cause(self, cause):
if not self.is_new:
self.db_deleted_cause.append(self._db_cause)
self._db_cause = None
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_change_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_delete_account(self, account):
self.is_dirty = True
raise Exception('Cannot delete a non-keyed object')
def db_get_account(self, key):
return None
def __get_db_opm_times(self):
return self._db_opm_times
def __set_db_opm_times(self, opm_times):
self._db_opm_times = opm_times
self.is_dirty = True
db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
def db_get_opm_times(self):
return self._db_opm_times
def db_add_opm_time(self, opm_time):
self.is_dirty = True
self._db_opm_times.append(opm_time)
def db_change_opm_time(self, opm_time):
self.is_dirty = True
self._db_opm_times.append(opm_time)
def db_delete_opm_time(self, opm_time):
self.is_dirty = True
raise Exception('Cannot delete a non-keyed object')
def db_get_opm_time(self, key):
return None
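
# Illustrative sketch (not part of the generated code): replacing a singleton
# child records the old value so the delete can be persisted later.  The
# 'new_cause' argument is assumed to be a DBOpmProcessIdCause instance.
def _example_replace_cause(edge, new_cause):
    if edge.db_cause is not None:
        edge.db_delete_cause(edge.db_cause)  # queues the old cause (unless new)
    edge.db_add_cause(new_cause)
    return edge.db_deleted_children()
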
class DBOpmProcessValue(object):
vtType = 'opm_process_value'
def __init__(self, value=None):
self.db_deleted_value = []
self._db_value = value
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmProcessValue.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmProcessValue()
if self._db_value is not None:
cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmProcessValue()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
obj = old_obj.db_value
if obj.vtType == 'module_exec':
new_obj.db_add_value(DBModuleExec.update_version(obj, trans_dict))
elif obj.vtType == 'group_exec':
new_obj.db_add_value(DBGroupExec.update_version(obj, trans_dict))
elif obj.vtType == 'loop_exec':
new_obj.db_add_value(DBLoopExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
for obj in old_obj.db_deleted_value:
if obj.vtType == 'module_exec':
n_obj = DBModuleExec.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
elif obj.vtType == 'group_exec':
n_obj = DBGroupExec.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
elif obj.vtType == 'loop_exec':
n_obj = DBLoopExec.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_value is not None:
children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_value = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_value)
if remove:
self.db_deleted_value = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_value is not None and self._db_value.has_changes():
return True
return False
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
if not self.is_new:
self.db_deleted_value.append(self._db_value)
self._db_value = None
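
# Illustrative sketch (not part of the generated code): update_version walks
# an old object graph with a translation dict keyed by class name.  An
# identity migration needs no entries at all; per-field hooks are optional
# overrides.  'old_value' is assumed to be a DBOpmProcessValue from an older
# schema version.
def _example_migrate_value(old_value):
    trans_dict = {}  # no overrides: every field is copied across unchanged
    return DBOpmProcessValue.update_version(old_value, trans_dict)
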
class DBAction(object):
vtType = 'action'
def __init__(self, operations=None, id=None, prevId=None, date=None, session=None, user=None, prune=None, annotations=None):
self.db_deleted_operations = []
self.db_operations_id_index = {}
if operations is None:
self._db_operations = []
else:
self._db_operations = operations
for v in self._db_operations:
self.db_operations_id_index[v.db_id] = v
self._db_id = id
self._db_prevId = prevId
self._db_date = date
self._db_session = session
self._db_user = user
self._db_prune = prune
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAction.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAction(id=self._db_id,
prevId=self._db_prevId,
date=self._db_date,
session=self._db_session,
user=self._db_user,
prune=self._db_prune)
if self._db_operations is None:
cp._db_operations = []
else:
cp._db_operations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_operations]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
            if hasattr(self, 'db_prevId') and ('action', self._db_prevId) in id_remap:
                # rewrite the parent-action pointer to the remapped id
                cp._db_prevId = id_remap[('action', self._db_prevId)]
# recreate indices and set flags
cp.db_operations_id_index = dict((v.db_id, v) for v in cp._db_operations)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAction()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'operations' in class_dict:
res = class_dict['operations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_operation(obj)
elif hasattr(old_obj, 'db_operations') and old_obj.db_operations is not None:
for obj in old_obj.db_operations:
if obj.vtType == 'add':
new_obj.db_add_operation(DBAdd.update_version(obj, trans_dict))
elif obj.vtType == 'delete':
new_obj.db_add_operation(DBDelete.update_version(obj, trans_dict))
elif obj.vtType == 'change':
new_obj.db_add_operation(DBChange.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_operations') and hasattr(new_obj, 'db_deleted_operations'):
for obj in old_obj.db_deleted_operations:
if obj.vtType == 'add':
n_obj = DBAdd.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
elif obj.vtType == 'delete':
n_obj = DBDelete.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
elif obj.vtType == 'change':
n_obj = DBChange.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'prevId' in class_dict:
res = class_dict['prevId'](old_obj, trans_dict)
new_obj.db_prevId = res
elif hasattr(old_obj, 'db_prevId') and old_obj.db_prevId is not None:
new_obj.db_prevId = old_obj.db_prevId
if 'date' in class_dict:
res = class_dict['date'](old_obj, trans_dict)
new_obj.db_date = res
elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
new_obj.db_date = old_obj.db_date
if 'session' in class_dict:
res = class_dict['session'](old_obj, trans_dict)
new_obj.db_session = res
elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None:
new_obj.db_session = old_obj.db_session
if 'user' in class_dict:
res = class_dict['user'](old_obj, trans_dict)
new_obj.db_user = res
elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
new_obj.db_user = old_obj.db_user
if 'prune' in class_dict:
res = class_dict['prune'](old_obj, trans_dict)
new_obj.db_prune = res
elif hasattr(old_obj, 'db_prune') and old_obj.db_prune is not None:
new_obj.db_prune = old_obj.db_prune
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_operations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_operation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_operations)
if remove:
self.db_deleted_annotations = []
self.db_deleted_operations = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_operations:
if child.has_changes():
return True
return False
def __get_db_operations(self):
return self._db_operations
def __set_db_operations(self, operations):
self._db_operations = operations
self.is_dirty = True
db_operations = property(__get_db_operations, __set_db_operations)
def db_get_operations(self):
return self._db_operations
def db_add_operation(self, operation):
self.is_dirty = True
self._db_operations.append(operation)
self.db_operations_id_index[operation.db_id] = operation
def db_change_operation(self, operation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_operations)):
if self._db_operations[i].db_id == operation.db_id:
self._db_operations[i] = operation
found = True
break
if not found:
self._db_operations.append(operation)
self.db_operations_id_index[operation.db_id] = operation
def db_delete_operation(self, operation):
self.is_dirty = True
for i in xrange(len(self._db_operations)):
if self._db_operations[i].db_id == operation.db_id:
if not self._db_operations[i].is_new:
self.db_deleted_operations.append(self._db_operations[i])
del self._db_operations[i]
break
del self.db_operations_id_index[operation.db_id]
def db_get_operation(self, key):
for i in xrange(len(self._db_operations)):
if self._db_operations[i].db_id == key:
return self._db_operations[i]
return None
def db_get_operation_by_id(self, key):
return self.db_operations_id_index[key]
def db_has_operation_with_id(self, key):
return key in self.db_operations_id_index
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_prevId(self):
return self._db_prevId
def __set_db_prevId(self, prevId):
self._db_prevId = prevId
self.is_dirty = True
db_prevId = property(__get_db_prevId, __set_db_prevId)
def db_add_prevId(self, prevId):
self._db_prevId = prevId
def db_change_prevId(self, prevId):
self._db_prevId = prevId
def db_delete_prevId(self, prevId):
self._db_prevId = None
def __get_db_date(self):
return self._db_date
def __set_db_date(self, date):
self._db_date = date
self.is_dirty = True
db_date = property(__get_db_date, __set_db_date)
def db_add_date(self, date):
self._db_date = date
def db_change_date(self, date):
self._db_date = date
def db_delete_date(self, date):
self._db_date = None
def __get_db_session(self):
return self._db_session
def __set_db_session(self, session):
self._db_session = session
self.is_dirty = True
db_session = property(__get_db_session, __set_db_session)
def db_add_session(self, session):
self._db_session = session
def db_change_session(self, session):
self._db_session = session
def db_delete_session(self, session):
self._db_session = None
def __get_db_user(self):
return self._db_user
def __set_db_user(self, user):
self._db_user = user
self.is_dirty = True
db_user = property(__get_db_user, __set_db_user)
def db_add_user(self, user):
self._db_user = user
def db_change_user(self, user):
self._db_user = user
def db_delete_user(self, user):
self._db_user = None
def __get_db_prune(self):
return self._db_prune
def __set_db_prune(self, prune):
self._db_prune = prune
self.is_dirty = True
db_prune = property(__get_db_prune, __set_db_prune)
def db_add_prune(self, prune):
self._db_prune = prune
def db_change_prune(self, prune):
self._db_prune = prune
def db_delete_prune(self, prune):
self._db_prune = None
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def getPrimaryKey(self):
return self._db_id
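
# Illustrative sketch (not part of the generated code): a version-tree action
# bundling a single delete operation.  Field values are hypothetical.
def _example_delete_action():
    op = DBDelete(id=1, what='module', objectId=7,
                  parentObjId=0, parentObjType='workflow')
    return DBAction(id=42, prevId=41, user='alice',
                    date='2008-01-01 12:00:00', operations=[op])
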
class DBOpmAgent(object):
vtType = 'opm_agent'
def __init__(self, id=None, value=None, accounts=None):
self._db_id = id
self._db_value = value
self.db_deleted_accounts = []
if accounts is None:
self._db_accounts = []
else:
self._db_accounts = accounts
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOpmAgent.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOpmAgent(id=self._db_id,
value=self._db_value)
if self._db_accounts is None:
cp._db_accounts = []
else:
cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOpmAgent()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
new_obj.db_value = old_obj.db_value
if 'accounts' in class_dict:
res = class_dict['accounts'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_account(obj)
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
for obj in old_obj.db_accounts:
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
for obj in old_obj.db_deleted_accounts:
n_obj = DBOpmAccountId.update_version(obj, trans_dict)
new_obj.db_deleted_accounts.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_accounts:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_account(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_accounts)
if remove:
self.db_deleted_accounts = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_accounts:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
self._db_value = None
def __get_db_accounts(self):
return self._db_accounts
def __set_db_accounts(self, accounts):
self._db_accounts = accounts
self.is_dirty = True
db_accounts = property(__get_db_accounts, __set_db_accounts)
def db_get_accounts(self):
return self._db_accounts
def db_add_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_change_account(self, account):
self.is_dirty = True
self._db_accounts.append(account)
def db_delete_account(self, account):
self.is_dirty = True
raise Exception('Cannot delete a non-keyed object')
def db_get_account(self, key):
return None
def getPrimaryKey(self):
return self._db_id
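
# Illustrative sketch (not part of the generated code): agents carry a plain
# string value rather than a nested object.  Values below are hypothetical.
def _example_opm_agent():
    return DBOpmAgent(id='a1', value='vistrails')
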
class DBDelete(object):
vtType = 'delete'
def __init__(self, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None):
self._db_id = id
self._db_what = what
self._db_objectId = objectId
self._db_parentObjId = parentObjId
self._db_parentObjType = parentObjType
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBDelete.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBDelete(id=self._db_id,
what=self._db_what,
objectId=self._db_objectId,
parentObjId=self._db_parentObjId,
parentObjType=self._db_parentObjType)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap:
cp._db_objectId = id_remap[(self._db_what, self._db_objectId)]
if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBDelete()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'what' in class_dict:
res = class_dict['what'](old_obj, trans_dict)
new_obj.db_what = res
elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
new_obj.db_what = old_obj.db_what
if 'objectId' in class_dict:
res = class_dict['objectId'](old_obj, trans_dict)
new_obj.db_objectId = res
elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None:
new_obj.db_objectId = old_obj.db_objectId
if 'parentObjId' in class_dict:
res = class_dict['parentObjId'](old_obj, trans_dict)
new_obj.db_parentObjId = res
elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
new_obj.db_parentObjId = old_obj.db_parentObjId
if 'parentObjType' in class_dict:
res = class_dict['parentObjType'](old_obj, trans_dict)
new_obj.db_parentObjType = res
elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
new_obj.db_parentObjType = old_obj.db_parentObjType
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_what(self):
return self._db_what
def __set_db_what(self, what):
self._db_what = what
self.is_dirty = True
db_what = property(__get_db_what, __set_db_what)
def db_add_what(self, what):
self._db_what = what
def db_change_what(self, what):
self._db_what = what
def db_delete_what(self, what):
self._db_what = None
def __get_db_objectId(self):
return self._db_objectId
def __set_db_objectId(self, objectId):
self._db_objectId = objectId
self.is_dirty = True
db_objectId = property(__get_db_objectId, __set_db_objectId)
def db_add_objectId(self, objectId):
self._db_objectId = objectId
def db_change_objectId(self, objectId):
self._db_objectId = objectId
def db_delete_objectId(self, objectId):
self._db_objectId = None
def __get_db_parentObjId(self):
return self._db_parentObjId
def __set_db_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
self.is_dirty = True
db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
def db_add_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_change_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_delete_parentObjId(self, parentObjId):
self._db_parentObjId = None
def __get_db_parentObjType(self):
return self._db_parentObjType
def __set_db_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
self.is_dirty = True
db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
def db_add_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_change_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_delete_parentObjType(self, parentObjType):
self._db_parentObjType = None
def getPrimaryKey(self):
return self._db_id
class DBVistrail(object):
vtType = 'vistrail'
def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, actions=None, tags=None, annotations=None):
self._db_id = id
self._db_entity_type = entity_type
self._db_version = version
self._db_name = name
self._db_last_modified = last_modified
self.db_deleted_actions = []
self.db_actions_id_index = {}
if actions is None:
self._db_actions = []
else:
self._db_actions = actions
for v in self._db_actions:
self.db_actions_id_index[v.db_id] = v
self.db_deleted_tags = []
self.db_tags_id_index = {}
self.db_tags_name_index = {}
if tags is None:
self._db_tags = []
else:
self._db_tags = tags
for v in self._db_tags:
self.db_tags_id_index[v.db_id] = v
self.db_tags_name_index[v.db_name] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBVistrail.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBVistrail(id=self._db_id,
entity_type=self._db_entity_type,
version=self._db_version,
name=self._db_name,
last_modified=self._db_last_modified)
if self._db_actions is None:
cp._db_actions = []
else:
cp._db_actions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actions]
if self._db_tags is None:
cp._db_tags = []
else:
cp._db_tags = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_tags]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_actions_id_index = dict((v.db_id, v) for v in cp._db_actions)
cp.db_tags_id_index = dict((v.db_id, v) for v in cp._db_tags)
cp.db_tags_name_index = dict((v.db_name, v) for v in cp._db_tags)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBVistrail()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'entity_type' in class_dict:
res = class_dict['entity_type'](old_obj, trans_dict)
new_obj.db_entity_type = res
elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
new_obj.db_entity_type = old_obj.db_entity_type
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'actions' in class_dict:
res = class_dict['actions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_action(obj)
elif hasattr(old_obj, 'db_actions') and old_obj.db_actions is not None:
for obj in old_obj.db_actions:
new_obj.db_add_action(DBAction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_actions') and hasattr(new_obj, 'db_deleted_actions'):
for obj in old_obj.db_deleted_actions:
n_obj = DBAction.update_version(obj, trans_dict)
new_obj.db_deleted_actions.append(n_obj)
if 'tags' in class_dict:
res = class_dict['tags'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_tag(obj)
elif hasattr(old_obj, 'db_tags') and old_obj.db_tags is not None:
for obj in old_obj.db_tags:
new_obj.db_add_tag(DBTag.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_tags') and hasattr(new_obj, 'db_deleted_tags'):
for obj in old_obj.db_deleted_tags:
n_obj = DBTag.update_version(obj, trans_dict)
new_obj.db_deleted_tags.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_actions:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_action(child)
to_del = []
for child in self.db_tags:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_tag(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_actions)
children.extend(self.db_deleted_tags)
children.extend(self.db_deleted_annotations)
if remove:
self.db_deleted_actions = []
self.db_deleted_tags = []
self.db_deleted_annotations = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_actions:
if child.has_changes():
return True
for child in self._db_tags:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_entity_type(self):
return self._db_entity_type
def __set_db_entity_type(self, entity_type):
self._db_entity_type = entity_type
self.is_dirty = True
db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
def db_add_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_change_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_delete_entity_type(self, entity_type):
self._db_entity_type = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_actions(self):
return self._db_actions
def __set_db_actions(self, actions):
self._db_actions = actions
self.is_dirty = True
db_actions = property(__get_db_actions, __set_db_actions)
def db_get_actions(self):
return self._db_actions
def db_add_action(self, action):
self.is_dirty = True
self._db_actions.append(action)
self.db_actions_id_index[action.db_id] = action
def db_change_action(self, action):
self.is_dirty = True
found = False
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == action.db_id:
self._db_actions[i] = action
found = True
break
if not found:
self._db_actions.append(action)
self.db_actions_id_index[action.db_id] = action
def db_delete_action(self, action):
self.is_dirty = True
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == action.db_id:
if not self._db_actions[i].is_new:
self.db_deleted_actions.append(self._db_actions[i])
del self._db_actions[i]
break
del self.db_actions_id_index[action.db_id]
def db_get_action(self, key):
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == key:
return self._db_actions[i]
return None
def db_get_action_by_id(self, key):
return self.db_actions_id_index[key]
def db_has_action_with_id(self, key):
return key in self.db_actions_id_index
def __get_db_tags(self):
return self._db_tags
def __set_db_tags(self, tags):
self._db_tags = tags
self.is_dirty = True
db_tags = property(__get_db_tags, __set_db_tags)
def db_get_tags(self):
return self._db_tags
def db_add_tag(self, tag):
self.is_dirty = True
self._db_tags.append(tag)
self.db_tags_id_index[tag.db_id] = tag
self.db_tags_name_index[tag.db_name] = tag
def db_change_tag(self, tag):
self.is_dirty = True
found = False
for i in xrange(len(self._db_tags)):
if self._db_tags[i].db_id == tag.db_id:
self._db_tags[i] = tag
found = True
break
if not found:
self._db_tags.append(tag)
self.db_tags_id_index[tag.db_id] = tag
self.db_tags_name_index[tag.db_name] = tag
def db_delete_tag(self, tag):
self.is_dirty = True
for i in xrange(len(self._db_tags)):
if self._db_tags[i].db_id == tag.db_id:
if not self._db_tags[i].is_new:
self.db_deleted_tags.append(self._db_tags[i])
del self._db_tags[i]
break
del self.db_tags_id_index[tag.db_id]
del self.db_tags_name_index[tag.db_name]
def db_get_tag(self, key):
for i in xrange(len(self._db_tags)):
if self._db_tags[i].db_id == key:
return self._db_tags[i]
return None
def db_get_tag_by_id(self, key):
return self.db_tags_id_index[key]
def db_has_tag_with_id(self, key):
return key in self.db_tags_id_index
def db_get_tag_by_name(self, key):
return self.db_tags_name_index[key]
def db_has_tag_with_name(self, key):
return key in self.db_tags_name_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def getPrimaryKey(self):
return self._db_id
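# Usage sketch (illustrative, not part of the generated API): the tag
# accessors above keep db_tags_id_index and db_tags_name_index in sync,
# so the *_by_id / *_by_name lookups are O(1) dict hits rather than list
# scans. Assuming the DBTag class defined elsewhere in this module:
#
#   vt = DBVistrail(id=1)
#   vt.db_add_tag(DBTag(id=7, name='baseline'))
#   assert vt.db_get_tag_by_name('baseline') is vt.db_get_tag_by_id(7)
#   vt.db_delete_tag(vt.db_get_tag_by_id(7))  # prunes both indices too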
class DBModuleExec(object):
vtType = 'module_exec'
def __init__(self, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, module_name=None, completed=None, error=None, machine_id=None, annotations=None, loop_execs=None):
self._db_id = id
self._db_ts_start = ts_start
self._db_ts_end = ts_end
self._db_cached = cached
self._db_module_id = module_id
self._db_module_name = module_name
self._db_completed = completed
self._db_error = error
self._db_machine_id = machine_id
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_deleted_loop_execs = []
self.db_loop_execs_id_index = {}
if loop_execs is None:
self._db_loop_execs = []
else:
self._db_loop_execs = loop_execs
for v in self._db_loop_execs:
self.db_loop_execs_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBModuleExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBModuleExec(id=self._db_id,
ts_start=self._db_ts_start,
ts_end=self._db_ts_end,
cached=self._db_cached,
module_id=self._db_module_id,
module_name=self._db_module_name,
completed=self._db_completed,
error=self._db_error,
machine_id=self._db_machine_id)
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_loop_execs is None:
cp._db_loop_execs = []
else:
cp._db_loop_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_loop_execs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
cp._db_module_id = id_remap[('module', self._db_module_id)]
if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap:
cp._db_machine_id = id_remap[('machine', self._db_machine_id)]
# recreate indices and set flags
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_loop_execs_id_index = dict((v.db_id, v) for v in cp._db_loop_execs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBModuleExec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'ts_start' in class_dict:
res = class_dict['ts_start'](old_obj, trans_dict)
new_obj.db_ts_start = res
elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
new_obj.db_ts_start = old_obj.db_ts_start
if 'ts_end' in class_dict:
res = class_dict['ts_end'](old_obj, trans_dict)
new_obj.db_ts_end = res
elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
new_obj.db_ts_end = old_obj.db_ts_end
if 'cached' in class_dict:
res = class_dict['cached'](old_obj, trans_dict)
new_obj.db_cached = res
elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None:
new_obj.db_cached = old_obj.db_cached
if 'module_id' in class_dict:
res = class_dict['module_id'](old_obj, trans_dict)
new_obj.db_module_id = res
elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
new_obj.db_module_id = old_obj.db_module_id
if 'module_name' in class_dict:
res = class_dict['module_name'](old_obj, trans_dict)
new_obj.db_module_name = res
elif hasattr(old_obj, 'db_module_name') and old_obj.db_module_name is not None:
new_obj.db_module_name = old_obj.db_module_name
if 'completed' in class_dict:
res = class_dict['completed'](old_obj, trans_dict)
new_obj.db_completed = res
elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
new_obj.db_completed = old_obj.db_completed
if 'error' in class_dict:
res = class_dict['error'](old_obj, trans_dict)
new_obj.db_error = res
elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
new_obj.db_error = old_obj.db_error
if 'machine_id' in class_dict:
res = class_dict['machine_id'](old_obj, trans_dict)
new_obj.db_machine_id = res
elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None:
new_obj.db_machine_id = old_obj.db_machine_id
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'loop_execs' in class_dict:
res = class_dict['loop_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_loop_exec(obj)
elif hasattr(old_obj, 'db_loop_execs') and old_obj.db_loop_execs is not None:
for obj in old_obj.db_loop_execs:
new_obj.db_add_loop_exec(DBLoopExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_loop_execs') and hasattr(new_obj, 'db_deleted_loop_execs'):
for obj in old_obj.db_deleted_loop_execs:
n_obj = DBLoopExec.update_version(obj, trans_dict)
new_obj.db_deleted_loop_execs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_loop_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_loop_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_loop_execs)
if remove:
self.db_deleted_annotations = []
self.db_deleted_loop_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_loop_execs:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_ts_start(self):
return self._db_ts_start
def __set_db_ts_start(self, ts_start):
self._db_ts_start = ts_start
self.is_dirty = True
db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
def db_add_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_change_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_delete_ts_start(self, ts_start):
self._db_ts_start = None
def __get_db_ts_end(self):
return self._db_ts_end
def __set_db_ts_end(self, ts_end):
self._db_ts_end = ts_end
self.is_dirty = True
db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
def db_add_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_change_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_delete_ts_end(self, ts_end):
self._db_ts_end = None
def __get_db_cached(self):
return self._db_cached
def __set_db_cached(self, cached):
self._db_cached = cached
self.is_dirty = True
db_cached = property(__get_db_cached, __set_db_cached)
def db_add_cached(self, cached):
self._db_cached = cached
def db_change_cached(self, cached):
self._db_cached = cached
def db_delete_cached(self, cached):
self._db_cached = None
def __get_db_module_id(self):
return self._db_module_id
def __set_db_module_id(self, module_id):
self._db_module_id = module_id
self.is_dirty = True
db_module_id = property(__get_db_module_id, __set_db_module_id)
def db_add_module_id(self, module_id):
self._db_module_id = module_id
def db_change_module_id(self, module_id):
self._db_module_id = module_id
def db_delete_module_id(self, module_id):
self._db_module_id = None
def __get_db_module_name(self):
return self._db_module_name
def __set_db_module_name(self, module_name):
self._db_module_name = module_name
self.is_dirty = True
db_module_name = property(__get_db_module_name, __set_db_module_name)
def db_add_module_name(self, module_name):
self._db_module_name = module_name
def db_change_module_name(self, module_name):
self._db_module_name = module_name
def db_delete_module_name(self, module_name):
self._db_module_name = None
def __get_db_completed(self):
return self._db_completed
def __set_db_completed(self, completed):
self._db_completed = completed
self.is_dirty = True
db_completed = property(__get_db_completed, __set_db_completed)
def db_add_completed(self, completed):
self._db_completed = completed
def db_change_completed(self, completed):
self._db_completed = completed
def db_delete_completed(self, completed):
self._db_completed = None
def __get_db_error(self):
return self._db_error
def __set_db_error(self, error):
self._db_error = error
self.is_dirty = True
db_error = property(__get_db_error, __set_db_error)
def db_add_error(self, error):
self._db_error = error
def db_change_error(self, error):
self._db_error = error
def db_delete_error(self, error):
self._db_error = None
def __get_db_machine_id(self):
return self._db_machine_id
def __set_db_machine_id(self, machine_id):
self._db_machine_id = machine_id
self.is_dirty = True
db_machine_id = property(__get_db_machine_id, __set_db_machine_id)
def db_add_machine_id(self, machine_id):
self._db_machine_id = machine_id
def db_change_machine_id(self, machine_id):
self._db_machine_id = machine_id
def db_delete_machine_id(self, machine_id):
self._db_machine_id = None
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def __get_db_loop_execs(self):
return self._db_loop_execs
def __set_db_loop_execs(self, loop_execs):
self._db_loop_execs = loop_execs
self.is_dirty = True
db_loop_execs = property(__get_db_loop_execs, __set_db_loop_execs)
def db_get_loop_execs(self):
return self._db_loop_execs
def db_add_loop_exec(self, loop_exec):
self.is_dirty = True
self._db_loop_execs.append(loop_exec)
self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
def db_change_loop_exec(self, loop_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == loop_exec.db_id:
self._db_loop_execs[i] = loop_exec
found = True
break
if not found:
self._db_loop_execs.append(loop_exec)
self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
def db_delete_loop_exec(self, loop_exec):
self.is_dirty = True
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == loop_exec.db_id:
if not self._db_loop_execs[i].is_new:
self.db_deleted_loop_execs.append(self._db_loop_execs[i])
del self._db_loop_execs[i]
break
del self.db_loop_execs_id_index[loop_exec.db_id]
def db_get_loop_exec(self, key):
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == key:
return self._db_loop_execs[i]
return None
def db_get_loop_exec_by_id(self, key):
return self.db_loop_execs_id_index[key]
def db_has_loop_exec_with_id(self, key):
return key in self.db_loop_execs_id_index
def getPrimaryKey(self):
return self._db_id
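# Copy-with-remap sketch (illustrative): calling do_copy(new_ids=True)
# allocates fresh ids from an id scope and records (vtType, old_id) ->
# new_id pairs in id_remap, which is how cross-references such as
# db_module_id and db_machine_id get rewritten above. Assuming an id
# scope object with getNewId() and a remap dict, as this module expects:
#
#   id_remap = {}
#   cp = module_exec.do_copy(new_ids=True, id_scope=scope, id_remap=id_remap)
#   # id_remap now maps ('module_exec', old_id) -> cp.db_id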
|
tangfeixiong/nova
|
refs/heads/stable/juno
|
nova/conductor/rpcapi.py
|
21
|
# Copyright 2013 IBM Corp.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Client side of the conductor RPC API."""
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from nova.objects import base as objects_base
from nova import rpc
CONF = cfg.CONF
rpcapi_cap_opt = cfg.StrOpt('conductor',
help='Set a version cap for messages sent to conductor services')
CONF.register_opt(rpcapi_cap_opt, 'upgrade_levels')
class ConductorAPI(object):
"""Client side of the conductor RPC API
API version history:
* 1.0 - Initial version.
* 1.1 - Added migration_update
* 1.2 - Added instance_get_by_uuid and instance_get_all_by_host
* 1.3 - Added aggregate_host_add and aggregate_host_delete
* 1.4 - Added migration_get
* 1.5 - Added bw_usage_update
* 1.6 - Added get_backdoor_port()
* 1.7 - Added aggregate_get_by_host, aggregate_metadata_add,
and aggregate_metadata_delete
* 1.8 - Added security_group_get_by_instance and
security_group_rule_get_by_security_group
* 1.9 - Added provider_fw_rule_get_all
* 1.10 - Added agent_build_get_by_triple
* 1.11 - Added aggregate_get
* 1.12 - Added block_device_mapping_update_or_create
* 1.13 - Added block_device_mapping_get_all_by_instance
* 1.14 - Added block_device_mapping_destroy
* 1.15 - Added instance_get_all_by_filters and
instance_get_all_hung_in_rebooting and
instance_get_active_by_window
Deprecated instance_get_all_by_host
* 1.16 - Added instance_destroy
* 1.17 - Added instance_info_cache_delete
* 1.18 - Added instance_type_get
* 1.19 - Added vol_get_usage_by_time and vol_usage_update
* 1.20 - Added migration_get_unconfirmed_by_dest_compute
* 1.21 - Added service_get_all_by
* 1.22 - Added ping
* 1.23 - Added instance_get_all
Un-Deprecate instance_get_all_by_host
* 1.24 - Added instance_get
* 1.25 - Added action_event_start and action_event_finish
* 1.26 - Added instance_info_cache_update
* 1.27 - Added service_create
* 1.28 - Added binary arg to service_get_all_by
* 1.29 - Added service_destroy
* 1.30 - Added migration_create
* 1.31 - Added migration_get_in_progress_by_host_and_node
* 1.32 - Added optional node to instance_get_all_by_host
* 1.33 - Added compute_node_create and compute_node_update
* 1.34 - Added service_update
* 1.35 - Added instance_get_active_by_window_joined
* 1.36 - Added instance_fault_create
* 1.37 - Added task_log_get, task_log_begin_task, task_log_end_task
* 1.38 - Added service name to instance_update
* 1.39 - Added notify_usage_exists
* 1.40 - Added security_groups_trigger_handler and
security_groups_trigger_members_refresh
Remove instance_get_active_by_window
* 1.41 - Added fixed_ip_get_by_instance, network_get,
instance_floating_address_get_all, quota_commit,
quota_rollback
* 1.42 - Added get_ec2_ids, aggregate_metadata_get_by_host
* 1.43 - Added compute_stop
* 1.44 - Added compute_node_delete
* 1.45 - Added project_id to quota_commit and quota_rollback
* 1.46 - Added compute_confirm_resize
* 1.47 - Added columns_to_join to instance_get_all_by_host and
instance_get_all_by_filters
* 1.48 - Added compute_unrescue
... Grizzly supports message version 1.48. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 1.48.
* 1.49 - Added columns_to_join to instance_get_by_uuid
* 1.50 - Added object_action() and object_class_action()
* 1.51 - Added the 'legacy' argument to
block_device_mapping_get_all_by_instance
* 1.52 - Pass instance objects for compute_confirm_resize
* 1.53 - Added compute_reboot
* 1.54 - Added 'update_cells' argument to bw_usage_update
* 1.55 - Pass instance objects for compute_stop
* 1.56 - Remove compute_confirm_resize and
migration_get_unconfirmed_by_dest_compute
* 1.57 - Remove migration_create()
* 1.58 - Remove migration_get()
... Havana supports message version 1.58. So, any changes to existing
methods in 1.x after that point should be done such that they can
handle the version_cap being set to 1.58.
* 1.59 - Remove instance_info_cache_update()
* 1.60 - Remove aggregate_metadata_add() and aggregate_metadata_delete()
* ... - Remove security_group_get_by_instance() and
security_group_rule_get_by_security_group()
* 1.61 - Return deleted instance from instance_destroy()
* 1.62 - Added object_backport()
* 1.63 - Changed the format of values['stats'] from a dict to a JSON string
in compute_node_update()
* 1.64 - Added use_slave to instance_get_all_filters()
- Remove instance_type_get()
- Remove aggregate_get()
- Remove aggregate_get_by_host()
- Remove instance_get()
- Remove migration_update()
- Remove block_device_mapping_destroy()
* 2.0 - Drop backwards compatibility
- Remove quota_rollback() and quota_commit()
- Remove aggregate_host_add() and aggregate_host_delete()
- Remove network_migrate_instance_start() and
network_migrate_instance_finish()
- Remove vol_get_usage_by_time
... Icehouse supports message version 2.0. So, any changes to
existing methods in 2.x after that point should be done such
that they can handle the version_cap being set to 2.0.
* Remove instance_destroy()
* Remove compute_unrescue()
* Remove instance_get_all_by_filters()
* Remove instance_get_active_by_window_joined()
* Remove instance_fault_create()
* Remove action_event_start() and action_event_finish()
* Remove instance_get_by_uuid()
* Remove agent_build_get_by_triple()
... Juno supports message version 2.0. So, any changes to
existing methods in 2.x after that point should be done such
that they can handle the version_cap being set to 2.0.
* 2.1 - Make notify_usage_exists() take an instance object
* Remove bw_usage_update()
* Remove notify_usage_exists()
... Kilo supports message version 2.1. So, any changes to
existing methods in 2.x after that point should be done such
that they can handle the version_cap being set to 2.1.
* Remove get_ec2_ids()
* Remove service_get_all_by()
* Remove service_create()
* Remove service_destroy()
* Remove service_update()
* Remove migration_get_in_progress_by_host_and_node()
* Remove aggregate_metadata_get_by_host()
* Remove block_device_mapping_update_or_create()
* Remove block_device_mapping_get_all_by_instance()
* Remove instance_get_all_by_host()
* Remove compute_node_update()
* Remove compute_node_delete()
* Remove security_groups_trigger_handler()
"""
VERSION_ALIASES = {
'grizzly': '1.48',
'havana': '1.58',
'icehouse': '2.0',
'juno': '2.0',
'kilo': '2.1',
}
def __init__(self):
super(ConductorAPI, self).__init__()
target = messaging.Target(topic=CONF.conductor.topic, version='2.0')
version_cap = self.VERSION_ALIASES.get(CONF.upgrade_levels.conductor,
CONF.upgrade_levels.conductor)
serializer = objects_base.NovaObjectSerializer()
self.client = rpc.get_client(target,
version_cap=version_cap,
serializer=serializer)
def instance_update(self, context, instance_uuid, updates,
service=None):
updates_p = jsonutils.to_primitive(updates)
cctxt = self.client.prepare()
return cctxt.call(context, 'instance_update',
instance_uuid=instance_uuid,
updates=updates_p,
service=service)
def provider_fw_rule_get_all(self, context):
cctxt = self.client.prepare()
return cctxt.call(context, 'provider_fw_rule_get_all')
def vol_usage_update(self, context, vol_id, rd_req, rd_bytes, wr_req,
wr_bytes, instance, last_refreshed=None,
update_totals=False):
instance_p = jsonutils.to_primitive(instance)
cctxt = self.client.prepare()
return cctxt.call(context, 'vol_usage_update',
vol_id=vol_id, rd_req=rd_req,
rd_bytes=rd_bytes, wr_req=wr_req,
wr_bytes=wr_bytes,
instance=instance_p, last_refreshed=last_refreshed,
update_totals=update_totals)
def compute_node_create(self, context, values):
cctxt = self.client.prepare()
return cctxt.call(context, 'compute_node_create', values=values)
def task_log_get(self, context, task_name, begin, end, host, state=None):
cctxt = self.client.prepare()
return cctxt.call(context, 'task_log_get',
task_name=task_name, begin=begin, end=end,
host=host, state=state)
def task_log_begin_task(self, context, task_name, begin, end, host,
task_items=None, message=None):
cctxt = self.client.prepare()
return cctxt.call(context, 'task_log_begin_task',
task_name=task_name,
begin=begin, end=end, host=host,
task_items=task_items, message=message)
def task_log_end_task(self, context, task_name, begin, end, host, errors,
message=None):
cctxt = self.client.prepare()
return cctxt.call(context, 'task_log_end_task',
task_name=task_name, begin=begin, end=end,
host=host, errors=errors, message=message)
def security_groups_trigger_members_refresh(self, context, group_ids):
cctxt = self.client.prepare()
return cctxt.call(context, 'security_groups_trigger_members_refresh',
group_ids=group_ids)
def object_class_action(self, context, objname, objmethod, objver,
args, kwargs):
cctxt = self.client.prepare()
return cctxt.call(context, 'object_class_action',
objname=objname, objmethod=objmethod,
objver=objver, args=args, kwargs=kwargs)
def object_action(self, context, objinst, objmethod, args, kwargs):
cctxt = self.client.prepare()
return cctxt.call(context, 'object_action', objinst=objinst,
objmethod=objmethod, args=args, kwargs=kwargs)
def object_backport(self, context, objinst, target_version):
cctxt = self.client.prepare()
return cctxt.call(context, 'object_backport', objinst=objinst,
target_version=target_version)
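# Note on version caps: VERSION_ALIASES.get(x, x) above lets the
# [upgrade_levels] conductor option hold either a release alias or a raw
# version string, e.g.:
#
#   ConductorAPI.VERSION_ALIASES.get('juno', 'juno')   # -> '2.0'
#   ConductorAPI.VERSION_ALIASES.get('1.58', '1.58')   # -> '1.58' (pass-through)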
class ComputeTaskAPI(object):
"""Client side of the conductor 'compute' namespaced RPC API
API version history:
1.0 - Initial version (empty).
1.1 - Added unified migrate_server call.
1.2 - Added build_instances
1.3 - Added unshelve_instance
1.4 - Added reservations to migrate_server.
1.5 - Added the legacy_bdm parameter to build_instances
1.6 - Made migrate_server use instance objects
1.7 - Do not send block_device_mapping and legacy_bdm to build_instances
1.8 - Add rebuild_instance
1.9 - Converted requested_networks to NetworkRequestList object
1.10 - Made migrate_server() and build_instances() send flavor objects
1.11 - Added clean_shutdown to migrate_server()
"""
def __init__(self):
super(ComputeTaskAPI, self).__init__()
target = messaging.Target(topic=CONF.conductor.topic,
namespace='compute_task',
version='1.0')
serializer = objects_base.NovaObjectSerializer()
self.client = rpc.get_client(target, serializer=serializer)
def migrate_server(self, context, instance, scheduler_hint, live, rebuild,
flavor, block_migration, disk_over_commit,
reservations=None, clean_shutdown=True):
kw = {'instance': instance, 'scheduler_hint': scheduler_hint,
'live': live, 'rebuild': rebuild, 'flavor': flavor,
'block_migration': block_migration,
'disk_over_commit': disk_over_commit,
'reservations': reservations,
'clean_shutdown': clean_shutdown}
version = '1.11'
if not self.client.can_send_version(version):
del kw['clean_shutdown']
version = '1.10'
if not self.client.can_send_version(version):
kw['flavor'] = objects_base.obj_to_primitive(flavor)
version = '1.6'
if not self.client.can_send_version(version):
kw['instance'] = jsonutils.to_primitive(
objects_base.obj_to_primitive(instance))
version = '1.4'
cctxt = self.client.prepare(version=version)
return cctxt.call(context, 'migrate_server', **kw)
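# The cascade above is the usual rpcapi downgrade ladder: each
# can_send_version() check strips or primitivizes whatever a pinned,
# older conductor cannot deserialize (clean_shutdown at <1.11, flavor
# objects at <1.10, instance objects at <1.6), then sends at the highest
# version the cap allows.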
def build_instances(self, context, instances, image, filter_properties,
admin_password, injected_files, requested_networks,
security_groups, block_device_mapping, legacy_bdm=True):
image_p = jsonutils.to_primitive(image)
version = '1.10'
if not self.client.can_send_version(version):
version = '1.9'
if 'instance_type' in filter_properties:
flavor = filter_properties['instance_type']
flavor_p = objects_base.obj_to_primitive(flavor)
filter_properties = dict(filter_properties,
instance_type=flavor_p)
kw = {'instances': instances, 'image': image_p,
'filter_properties': filter_properties,
'admin_password': admin_password,
'injected_files': injected_files,
'requested_networks': requested_networks,
'security_groups': security_groups}
if not self.client.can_send_version(version):
version = '1.8'
kw['requested_networks'] = kw['requested_networks'].as_tuples()
if not self.client.can_send_version('1.7'):
version = '1.5'
bdm_p = objects_base.obj_to_primitive(block_device_mapping)
kw.update({'block_device_mapping': bdm_p,
'legacy_bdm': legacy_bdm})
cctxt = self.client.prepare(version=version)
cctxt.cast(context, 'build_instances', **kw)
def unshelve_instance(self, context, instance):
cctxt = self.client.prepare(version='1.3')
cctxt.cast(context, 'unshelve_instance', instance=instance)
def rebuild_instance(self, ctxt, instance, new_pass, injected_files,
image_ref, orig_image_ref, orig_sys_metadata, bdms,
recreate=False, on_shared_storage=False, host=None,
preserve_ephemeral=False, kwargs=None):
cctxt = self.client.prepare(version='1.8')
cctxt.cast(ctxt, 'rebuild_instance',
instance=instance, new_pass=new_pass,
injected_files=injected_files, image_ref=image_ref,
orig_image_ref=orig_image_ref,
orig_sys_metadata=orig_sys_metadata, bdms=bdms,
recreate=recreate, on_shared_storage=on_shared_storage,
preserve_ephemeral=preserve_ephemeral,
host=host)
|
s40523217/2016fallcp_hw
|
refs/heads/gh-pages
|
plugin/liquid_tags/notebook.py
|
26
|
"""
Notebook Tag
------------
This is a liquid-style tag to include a static html rendering of an IPython
notebook in a blog post.
Syntax
------
{% notebook filename.ipynb [ cells[start:end] ]%}
The file should be specified relative to the ``notebooks`` subdirectory of the
content directory. Optionally, this subdirectory can be specified in the
config file:
NOTEBOOK_DIR = 'notebooks'
The cells[start:end] statement is optional, and can be used to specify which
block of cells from the notebook to include.
Requirements
------------
- The plugin requires IPython version 1.0 or above. It no longer supports the
standalone nbconvert package, which has been deprecated.
Details
-------
Because the notebook relies on some rather extensive custom CSS, the use of
this plugin requires additional CSS to be inserted into the blog theme.
After typing "make html" when using the notebook tag, a file called
``_nb_header.html`` will be produced in the main directory. The content
of the file should be included in the header of the theme. An easy way
to accomplish this is to add the following lines within the header template
of the theme you use:
{% if EXTRA_HEADER %}
{{ EXTRA_HEADER }}
{% endif %}
and in your ``pelicanconf.py`` file, include the line:
EXTRA_HEADER = open('_nb_header.html').read().decode('utf-8')
this will insert the appropriate CSS. All efforts have been made to ensure
that this CSS will not override formats within the blog theme, but there may
still be some conflicts.
"""
import re
import os
from functools import partial
from .mdx_liquid_tags import LiquidTags
import IPython
IPYTHON_VERSION = IPython.version_info[0]
try:
import nbformat
except ImportError:
pass
if IPYTHON_VERSION < 1:
raise ValueError("IPython version 1.0+ required for notebook tag")
try:
from nbconvert.filters.highlight import _pygments_highlight
except ImportError:
try:
from IPython.nbconvert.filters.highlight import _pygments_highlight
except ImportError:
# IPython < 2.0
from IPython.nbconvert.filters.highlight import _pygment_highlight as _pygments_highlight
from pygments.formatters import HtmlFormatter
try:
from nbconvert.exporters import HTMLExporter
except ImportError:
from IPython.nbconvert.exporters import HTMLExporter
try:
from traitlets.config import Config
except ImportError:
from IPython.config import Config
try:
from nbconvert.preprocessors import Preprocessor
except ImportError:
try:
from IPython.nbconvert.preprocessors import Preprocessor
except ImportError:
# IPython < 2.0
from IPython.nbconvert.transformers import Transformer as Preprocessor
try:
from traitlets import Integer
except ImportError:
from IPython.utils.traitlets import Integer
from copy import deepcopy
#----------------------------------------------------------------------
# Some code that will be added to the header:
# Some of the following javascript/css include is adapted from
# IPython/nbconvert/templates/fullhtml.tpl, while some are custom tags
# specifically designed to make the results look good within the
# pelican-octopress theme.
JS_INCLUDE = r"""
<style type="text/css">
/* Overrides of notebook CSS for static HTML export */
div.entry-content {
overflow: visible;
padding: 8px;
}
.input_area {
padding: 0.2em;
}
a.heading-anchor {
white-space: normal;
}
.rendered_html
code {
font-size: .8em;
}
pre.ipynb {
color: black;
background: #f7f7f7;
border: none;
box-shadow: none;
margin-bottom: 0;
padding: 0;
margin: 0px;
font-size: 13px;
}
/* remove the prompt div from text cells */
div.text_cell .prompt {
display: none;
}
/* remove horizontal padding from text cells, */
/* so it aligns with outer body text */
div.text_cell_render {
padding: 0.5em 0em;
}
img.anim_icon{padding:0; border:0; vertical-align:middle; -webkit-box-shadow:none; box-shadow:none}
div.collapseheader {
width: 100%;
background-color:#d3d3d3;
padding: 2px;
cursor: pointer;
font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;
}
</style>
<script src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_HTML" type="text/javascript"></script>
<script type="text/javascript">
init_mathjax = function() {
if (window.MathJax) {
// MathJax loaded
MathJax.Hub.Config({
tex2jax: {
inlineMath: [ ['$','$'], ["\\(","\\)"] ],
displayMath: [ ['$$','$$'], ["\\[","\\]"] ]
},
displayAlign: 'left', // Change this to 'center' to center equations.
"HTML-CSS": {
styles: {'.MathJax_Display': {"margin": 0}}
}
});
MathJax.Hub.Queue(["Typeset",MathJax.Hub]);
}
}
init_mathjax();
</script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"></script>
<script type="text/javascript">
jQuery(document).ready(function($) {
$("div.collapseheader").click(function () {
$header = $(this).children("span").first();
$codearea = $(this).children(".input_area");
console.log($(this).children());
$codearea.slideToggle(500, function () {
$header.text(function () {
return $codearea.is(":visible") ? "Collapse Code" : "Expand Code";
});
});
});
});
</script>
"""
CSS_WRAPPER = """
<style type="text/css">
{0}
</style>
"""
#----------------------------------------------------------------------
# Create a custom preprocessor
class SliceIndex(Integer):
"""An integer trait that accepts None"""
default_value = None
def validate(self, obj, value):
if value is None:
return value
else:
return super(SliceIndex, self).validate(obj, value)
class SubCell(Preprocessor):
"""A transformer to select a slice of the cells of a notebook"""
start = SliceIndex(0, config=True,
help="first cell of notebook to be converted")
end = SliceIndex(None, config=True,
help="last cell of notebook to be converted")
def preprocess(self, nb, resources):
nbc = deepcopy(nb)
if IPYTHON_VERSION < 3:
for worksheet in nbc.worksheets:
cells = worksheet.cells[:]
worksheet.cells = cells[self.start:self.end]
else:
nbc.cells = nbc.cells[self.start:self.end]
return nbc, resources
call = preprocess # IPython < 2.0
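# SubCell is switched on per conversion through the exporter Config built
# in notebook() below, along the lines of (illustrative):
#
#   c = Config({'SubCell': {'enabled': True, 'start': 2, 'end': 8}})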
#----------------------------------------------------------------------
# Custom highlighter:
# instead of using class='highlight', use class='highlight-ipynb'
def custom_highlighter(source, language='ipython', metadata=None):
formatter = HtmlFormatter(cssclass='highlight-ipynb')
if not language:
language = 'ipython'
output = _pygments_highlight(source, formatter, language)
return output.replace('<pre>', '<pre class="ipynb">')
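# This highlighter is wired into the exporter below via
# filters={'highlight2html': partial(custom_highlighter, language=...)},
# so code cells render with class 'highlight-ipynb' instead of the
# theme's default 'highlight'.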
#----------------------------------------------------------------------
# Below is the pelican plugin code.
#
SYNTAX = "{% notebook /path/to/notebook.ipynb [ cells[start:end] ] [ language[language] ] %}"
FORMAT = re.compile(r"""^(\s+)?(?P<src>\S+)(\s+)?((cells\[)(?P<start>-?[0-9]*):(?P<end>-?[0-9]*)(\]))?(\s+)?((language\[)(?P<language>-?[a-z0-9\+\-]*)(\]))?(\s+)?$""")
@LiquidTags.register('notebook')
def notebook(preprocessor, tag, markup):
match = FORMAT.search(markup)
if match:
argdict = match.groupdict()
src = argdict['src']
start = argdict['start']
end = argdict['end']
language = argdict['language']
else:
raise ValueError("Error processing input, "
"expected syntax: {0}".format(SYNTAX))
if start:
start = int(start)
else:
start = 0
if end:
end = int(end)
else:
end = None
language_applied_highlighter = partial(custom_highlighter, language=language)
nb_dir = preprocessor.configs.getConfig('NOTEBOOK_DIR')
nb_path = os.path.join('content', nb_dir, src)
if not os.path.exists(nb_path):
raise ValueError("File {0} could not be found".format(nb_path))
# Create the custom notebook converter
c = Config({'CSSHTMLHeaderTransformer':
{'enabled':True, 'highlight_class':'.highlight-ipynb'},
'SubCell':
{'enabled':True, 'start':start, 'end':end}})
template_file = 'basic'
if IPYTHON_VERSION >= 3:
if os.path.exists('pelicanhtml_3.tpl'):
template_file = 'pelicanhtml_3'
elif IPYTHON_VERSION == 2:
if os.path.exists('pelicanhtml_2.tpl'):
template_file = 'pelicanhtml_2'
else:
if os.path.exists('pelicanhtml_1.tpl'):
template_file = 'pelicanhtml_1'
if IPYTHON_VERSION >= 2:
subcell_kwarg = dict(preprocessors=[SubCell])
else:
subcell_kwarg = dict(transformers=[SubCell])
exporter = HTMLExporter(config=c,
template_file=template_file,
filters={'highlight2html': language_applied_highlighter},
**subcell_kwarg)
# read and parse the notebook
with open(nb_path, encoding="utf-8") as f:
nb_text = f.read()
if IPYTHON_VERSION < 3:
nb_json = IPython.nbformat.current.reads_json(nb_text)
else:
try:
nb_json = nbformat.reads(nb_text, as_version=4)
except NameError:  # standalone nbformat was never imported; fall back to the bundled copy
nb_json = IPython.nbformat.reads(nb_text, as_version=4)
(body, resources) = exporter.from_notebook_node(nb_json)
# if we haven't already saved the header, save it here.
if not notebook.header_saved:
print ("\n ** Writing styles to _nb_header.html: "
"this should be included in the theme. **\n")
header = '\n'.join(CSS_WRAPPER.format(css_line)
for css_line in resources['inlining']['css'])
header += JS_INCLUDE
with open('_nb_header.html', 'w', encoding="utf-8") as f:
f.write(header)
notebook.header_saved = True
# this will stash special characters so that they won't be transformed
# by subsequent processes.
body = preprocessor.configs.htmlStash.store(body, safe=True)
return body
notebook.header_saved = False
#----------------------------------------------------------------------
# This import allows notebook to be a Pelican plugin
from liquid_tags import register
|
public0821/xxx
|
refs/heads/master
|
test/vm/virtualbox.py
|
1
|
import os
import logging
import sys
import shutil
import time
import platform
from pylib import shell
class VirtualBox(object):
def __init__(self, vboxmanage=None):
if vboxmanage:
self.__vboxmanage = vboxmanage
else:
self.__vboxmanage = self.scan_vboxmanage()
def scan_vboxmanage(self):
path = shutil.which("VBoxManage")
if path:
return path
if platform.system() == "Windows":
default_paths = [r"C:\Program Files\Oracle\VirtualBox", r"D:\Program Files\Oracle\VirtualBox"]
for p in default_paths:
path = os.path.join(p, "VBoxManage.exe")
if os.path.exists(path):
return path
raise Exception("can't find install path of VirtualBox")
def __getoutput(self, cmd):
command = self.__vboxmanage + " " + cmd
return shell.getoutput(command, shell=True).strip().split(os.linesep)
def __run_command(self, cmd):
command = self.__vboxmanage + " " + cmd
shell.run_command(command, shell=True)
def get_vm_list(self):
lines = self.__getoutput("list vms")
vms = []
for line in lines:
if len(line) > 0:
vms.append(line.split()[0][1:-1])
return vms
def __get_default_machine_folder(self):
lines = self.__getoutput("list systemproperties")
for line in lines:
if line.startswith("Default machine folder"):
return line.split(":")[-1].strip()
raise Exception("get default machine folder failed")
# running, poweroff, saved, paused
def vmstate(self, vmname):
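# note: the grep pipeline below assumes a POSIX shell; on Windows the
# VMState line would have to be filtered out in Python instead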
lines = self.__getoutput("showvminfo {} --machinereadable|grep VMState=".format(vmname))
return lines[0].split('VMState="')[-1][:-1]
def stop(self, vmname):
self.__run_command("controlvm {} poweroff".format(vmname))
# disk size (G)
def create(self, vmname, memory, disk_size, bridge_adapter, iso, force=False, disk_path=None):
vms = self.get_vm_list()
logging.debug("existing vms: " + str(vms))
if vmname in vms:
if force:
self.remove(vmname, True)
else:
raise Exception("there is a VM with the same name, if you want to recreate it, please specify --force in the command line")
if disk_path:
vdi = "{0}/{1}.vdi".format(disk_path, vmname)
else:
vdi = "{0}/{1}.vdi".format(self.__get_default_machine_folder(), vmname)
logging.debug(vdi)
self.__run_command("createvm --name {0} --ostype Debian_64 --register".format(vmname))
self.__run_command("modifyvm {0} --memory {1}".format(vmname, memory))
self.__run_command("modifyvm {0} --boot1 disk --boot2 dvd --boot3 net --boot4 none".format(vmname))
if bridge_adapter:
self.__run_command('modifyvm {0} --bridgeadapter1 "{1}"'.format(vmname, bridge_adapter))
self.__run_command("modifyvm {0} --nic1 bridged".format(vmname))
self.__run_command('createhd --filename "{0}" --size {1} --format VDI'.format(vdi, disk_size*1024))
self.__run_command("storagectl {0} --name SATA_Controller --add sata --controller IntelAhci".format(vmname))
self.__run_command('storageattach {0} --storagectl SATA_Controller --port 0 --device 0 --type hdd --medium "{1}"'.format(vmname, vdi))
self.__run_command("storagectl {0} --name IDE_Controller --add ide --controller PIIX4".format(vmname))
self.__run_command('storageattach {0} --storagectl IDE_Controller --port 1 --device 0 --type dvddrive --medium "{1}"'.format(vmname, iso))
def start(self, vmname):
self.__run_command("startvm " + vmname)
def remove(self, vmname, force):
if self.vmstate(vmname) == "running":
self.stop(vmname)
time.sleep(1) # TODO: find a better way to detect when the machine is unlocked
self.__run_command("unregistervm {} --delete".format(vmname))
def get_nic_list(self):
nics = []
lines = self.__getoutput("list bridgedifs|grep ^Name:")
for line in lines:
nics.append(line.split("Name:")[-1].strip())
return nics
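# --- Hedged usage sketch (added for illustration, not part of the original
# module). Assumes VBoxManage is installed and discoverable; the VM name and
# ISO path below are hypothetical.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    vbox = VirtualBox()
    print(vbox.get_vm_list())   # names of registered VMs
    print(vbox.get_nic_list())  # host NICs available for bridging
    # vbox.create("testvm", memory=1024, disk_size=10,
    #             bridge_adapter=vbox.get_nic_list()[0],
    #             iso="/tmp/debian.iso", force=True)
    # vbox.start("testvm")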
|
40223211/cadpbtest-0420
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/xml/etree/ElementInclude.py
|
784
|
#
# ElementTree
# $Id: ElementInclude.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xinclude support for element trees
#
# history:
# 2003-08-15 fl created
# 2003-11-14 fl fixed default loader
#
# Copyright (c) 2003-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2008 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
##
# Limited XInclude support for the ElementTree package.
##
import copy
from . import ElementTree
XINCLUDE = "{http://www.w3.org/2001/XInclude}"
XINCLUDE_INCLUDE = XINCLUDE + "include"
XINCLUDE_FALLBACK = XINCLUDE + "fallback"
##
# Fatal include error.
class FatalIncludeError(SyntaxError):
pass
##
# Default loader. This loader reads an included resource from disk.
#
# @param href Resource reference.
# @param parse Parse mode. Either "xml" or "text".
# @param encoding Optional text encoding (UTF-8 by default for "text").
# @return The expanded resource. If the parse mode is "xml", this
# is an ElementTree instance. If the parse mode is "text", this
# is a Unicode string. If the loader fails, it can return None
# or raise an IOError exception.
# @throws IOError If the loader fails to load the resource.
def default_loader(href, parse, encoding=None):
if parse == "xml":
file = open(href, 'rb')
data = ElementTree.parse(file).getroot()
else:
if not encoding:
encoding = 'UTF-8'
file = open(href, 'r', encoding=encoding)
data = file.read()
file.close()
return data
##
# Expand XInclude directives.
#
# @param elem Root element.
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as <b>default_loader</b>.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
# @throws IOError If the function fails to load a given resource.
def include(elem, loader=None):
if loader is None:
loader = default_loader
# look for xinclude elements
i = 0
while i < len(elem):
e = elem[i]
if e.tag == XINCLUDE_INCLUDE:
# process xinclude directive
href = e.get("href")
parse = e.get("parse", "xml")
if parse == "xml":
node = loader(href, parse)
if node is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
node = copy.copy(node)
if e.tail:
node.tail = (node.tail or "") + e.tail
elem[i] = node
elif parse == "text":
text = loader(href, parse, e.get("encoding"))
if text is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
if i:
node = elem[i-1]
node.tail = (node.tail or "") + text + (e.tail or "")
else:
elem.text = (elem.text or "") + text + (e.tail or "")
del elem[i]
continue
else:
raise FatalIncludeError(
"unknown parse type in xi:include tag (%r)" % parse
)
elif e.tag == XINCLUDE_FALLBACK:
raise FatalIncludeError(
"xi:fallback tag must be child of xi:include (%r)" % e.tag
)
else:
include(e, loader)
i = i + 1
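# --- Hedged usage sketch (added for illustration, not part of the original
# module). "doc.xml" is a hypothetical document containing
# <xi:include href="..."/> elements:
#
#   from xml.etree import ElementTree, ElementInclude
#   tree = ElementTree.parse("doc.xml")
#   ElementInclude.include(tree.getroot())
#   ElementTree.dump(tree.getroot())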
|
mattray/stackalytics
|
refs/heads/master
|
tests/unit/test_data.py
|
12
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DEFAULT_DATA = {
'users': [
{
'launchpad_id': 'john_doe',
'user_name': 'John Doe',
'emails': ['johndoe@gmail.com', 'jdoe@nec.com'],
'companies': [
{'company_name': '*independent', 'end_date': '2013-May-01'},
{'company_name': 'NEC', 'end_date': None},
]
},
{
'launchpad_id': 'ivan_ivanov',
'user_name': 'Ivan Ivanov',
'emails': ['ivanivan@yandex.ru', 'iivanov@mirantis.com'],
'companies': [
{'company_name': 'Mirantis', 'end_date': None},
]
}
],
'companies': [
{
'company_name': '*independent',
'domains': ['']
},
{
'company_name': 'NEC',
'domains': ['nec.com', 'nec.co.jp']
},
{
'company_name': 'Mirantis',
'domains': ['mirantis.com', 'mirantis.ru']
},
],
'repos': [
{
'branches': ['master'],
'module': 'stackalytics',
'project_type': 'stackforge',
'uri': 'git://github.com/stackforge/stackalytics.git'
}
],
'releases': [
{
'release_name': 'prehistory',
'end_date': '2011-Apr-21'
},
{
'release_name': 'Havana',
'end_date': '2013-Oct-17'
}
]
}
USERS = DEFAULT_DATA['users']
REPOS = DEFAULT_DATA['repos']
COMPANIES = DEFAULT_DATA['companies']
RELEASES = DEFAULT_DATA['releases']
|
wisechengyi/pants
|
refs/heads/master
|
pants-plugins/tests/python/internal_backend_test/utilities/test_releases.py
|
2
|
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import unittest
from packaging.version import InvalidVersion, Version
from internal_backend.utilities.register import PantsReleases
def _branch_name(revision_str):
return PantsReleases._branch_name(Version(revision_str))
class ReleasesTest(unittest.TestCase):
def test_branch_name_master(self):
self.assertEqual("master", _branch_name("1.1.0-dev1"))
self.assertEqual("master", _branch_name("1.1.0dev1"))
def test_branch_name_stable(self):
self.assertEqual("1.1.x", _branch_name("1.1.0-rc1"))
self.assertEqual("1.1.x", _branch_name("1.1.0rc1"))
self.assertEqual("2.1.x", _branch_name("2.1.0"))
self.assertEqual("1.2.x", _branch_name("1.2.0rc0-12345"))
# A negative example: do not prepend `<number>.`, because
# the first two numbers will be taken as the branch name.
self.assertEqual("12345.1.x", _branch_name("12345.1.2.0rc0"))
def test_invalid_test_branch_name_stable_append_alphabet(self):
with self.assertRaises(InvalidVersion):
_branch_name("1.2.0rc0-abcd")
def test_invalid_test_branch_name_stable_prepend_numbers(self):
with self.assertRaises(InvalidVersion):
_branch_name("12345-1.2.0rc0")
def test_branch_name_unknown_suffix(self):
with self.assertRaises(ValueError):
_branch_name("1.1.0-anything1")
|
savoirfairelinux/django
|
refs/heads/master
|
django/contrib/flatpages/sitemaps.py
|
729
|
from django.apps import apps as django_apps
from django.contrib.sitemaps import Sitemap
from django.core.exceptions import ImproperlyConfigured
class FlatPageSitemap(Sitemap):
def items(self):
if not django_apps.is_installed('django.contrib.sites'):
raise ImproperlyConfigured("FlatPageSitemap requires django.contrib.sites, which isn't installed.")
Site = django_apps.get_model('sites.Site')
current_site = Site.objects.get_current()
return current_site.flatpage_set.filter(registration_required=False)
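# --- Hedged usage sketch (added for illustration, not part of the original
# module). Typical wiring in a project's urls.py; names are illustrative:
#
#   from django.conf.urls import url
#   from django.contrib.sitemaps.views import sitemap
#   from django.contrib.flatpages.sitemaps import FlatPageSitemap
#
#   urlpatterns = [
#       url(r'^sitemap\.xml$', sitemap, {'sitemaps': {'flatpages': FlatPageSitemap}}),
#   ]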
|
Chilledheart/gyp
|
refs/heads/master
|
test/actions-subdir/src/make-file.py
|
489
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = 'Hello from make-file.py\n'
open(sys.argv[1], 'wb').write(contents)
|
ratpack/FrameworkBenchmarks
|
refs/heads/master
|
frameworks/Scala/play2-scala/setup_scala_slick.py
|
3
|
# This file was generated by frameworks/Java/play2-java/generate_config.py.
# Do not edit this file directly, use the script to regenerate.
from .setup_common import make_setup_for_dir
make_setup_for_dir(globals(), 'play2-scala-slick')
|
sander76/home-assistant
|
refs/heads/dev
|
homeassistant/components/supla/cover.py
|
15
|
"""Support for Supla cover - curtains, rollershutters, entry gate etc."""
import logging
from pprint import pformat
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_GARAGE,
CoverEntity,
)
from homeassistant.components.supla import (
DOMAIN,
SUPLA_COORDINATORS,
SUPLA_SERVERS,
SuplaChannel,
)
_LOGGER = logging.getLogger(__name__)
SUPLA_SHUTTER = "CONTROLLINGTHEROLLERSHUTTER"
SUPLA_GATE = "CONTROLLINGTHEGATE"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Supla covers."""
if discovery_info is None:
return
_LOGGER.debug("Discovery: %s", pformat(discovery_info))
entities = []
for device in discovery_info:
device_name = device["function_name"]
server_name = device["server_name"]
if device_name == SUPLA_SHUTTER:
entities.append(
SuplaCover(
device,
hass.data[DOMAIN][SUPLA_SERVERS][server_name],
hass.data[DOMAIN][SUPLA_COORDINATORS][server_name],
)
)
elif device_name == SUPLA_GATE:
entities.append(
SuplaGateDoor(
device,
hass.data[DOMAIN][SUPLA_SERVERS][server_name],
hass.data[DOMAIN][SUPLA_COORDINATORS][server_name],
)
)
async_add_entities(entities)
class SuplaCover(SuplaChannel, CoverEntity):
"""Representation of a Supla Cover."""
@property
def current_cover_position(self):
"""Return current position of cover. 0 is closed, 100 is open."""
state = self.channel_data.get("state")
if state:
return 100 - state["shut"]
return None
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
await self.async_action("REVEAL", percentage=kwargs.get(ATTR_POSITION))
@property
def is_closed(self):
"""Return if the cover is closed."""
if self.current_cover_position is None:
return None
return self.current_cover_position == 0
async def async_open_cover(self, **kwargs):
"""Open the cover."""
await self.async_action("REVEAL")
async def async_close_cover(self, **kwargs):
"""Close the cover."""
await self.async_action("SHUT")
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
await self.async_action("STOP")
class SuplaGateDoor(SuplaChannel, CoverEntity):
"""Representation of a Supla gate door."""
@property
def is_closed(self):
"""Return if the gate is closed or not."""
state = self.channel_data.get("state")
if state and "hi" in state:
return state.get("hi")
return None
async def async_open_cover(self, **kwargs) -> None:
"""Open the gate."""
if self.is_closed:
await self.async_action("OPEN_CLOSE")
async def async_close_cover(self, **kwargs) -> None:
"""Close the gate."""
if not self.is_closed:
await self.async_action("OPEN_CLOSE")
async def async_stop_cover(self, **kwargs) -> None:
"""Stop the gate."""
await self.async_action("OPEN_CLOSE")
async def async_toggle(self, **kwargs) -> None:
"""Toggle the gate."""
await self.async_action("OPEN_CLOSE")
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_GARAGE
|
gurneyalex/stock-logistics-workflow
|
refs/heads/8.0
|
stock_scanner/demo/Tutorial/Step_types/scanner_scenario_step_step_types_final.py
|
7
|
# flake8: noqa
# Use <m> or <message> to retrieve the data transmitted by the scanner.
# Use <t> or <terminal> to retrieve the running terminal browse record.
# Put the returned action code in <act>, as a single character.
# Put the returned result or message in <res>, as a list of strings.
# Put the returned value in <val>, as an integer.
act = 'F'
res = [
_('|Final step'),
'',
_('After this step, the scenario is finished.'),
]
|
Squarespace/graphite-web
|
refs/heads/master
|
examples/example-client.py
|
73
|
#!/usr/bin/python
"""Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
import sys
import time
import os
import platform
import subprocess
from socket import socket
CARBON_SERVER = '127.0.0.1'
CARBON_PORT = 2003
delay = 60
if len(sys.argv) > 1:
delay = int( sys.argv[1] )
def get_loadavg():
# For more details, "man proc" and "man uptime"
if platform.system() == "Linux":
return open('/proc/loadavg').read().strip().split()[:3]
else:
command = "uptime"
process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
os.waitpid(process.pid, 0)
output = process.stdout.read().replace(',', ' ').strip().split()
length = len(output)
return output[length - 3:length]
sock = socket()
try:
sock.connect( (CARBON_SERVER,CARBON_PORT) )
except:
print "Couldn't connect to %(server)s on port %(port)d, is carbon-agent.py running?" % { 'server':CARBON_SERVER, 'port':CARBON_PORT }
sys.exit(1)
while True:
now = int( time.time() )
lines = []
# We're going to report all three loadavg values
loadavg = get_loadavg()
lines.append("system.loadavg_1min %s %d" % (loadavg[0],now))
lines.append("system.loadavg_5min %s %d" % (loadavg[1],now))
lines.append("system.loadavg_15min %s %d" % (loadavg[2],now))
message = '\n'.join(lines) + '\n' #all lines must end in a newline
print "sending message\n"
print '-' * 80
print message
print
sock.sendall(message)
time.sleep(delay)
|
arnavd96/Cinemiezer
|
refs/heads/master
|
myvenv/lib/python3.4/site-packages/oauth2_provider/__init__.py
|
3
|
__version__ = '0.10.0'
__author__ = "Massimiliano Pippi & Federico Frenguelli"
default_app_config = 'oauth2_provider.apps.DOTConfig'
VERSION = __version__ # synonym
|
glouppe/scikit-learn
|
refs/heads/master
|
examples/semi_supervised/plot_label_propagation_structure.py
|
45
|
"""
==============================================
Label Propagation learning a complex structure
==============================================
Example of LabelPropagation learning a complex internal structure
to demonstrate "manifold learning". The outer circle should be
labeled "red" and the inner circle "blue". Because both label groups
lie inside their own distinct shape, we can see that the labels
propagate correctly around the circle.
"""
print(__doc__)
# Authors: Clay Woolam <clay@woolam.org>
# Andreas Mueller <amueller@ais.uni-bonn.de>
# Licence: BSD
import numpy as np
import matplotlib.pyplot as plt
from sklearn.semi_supervised import label_propagation
from sklearn.datasets import make_circles
# generate ring with inner box
n_samples = 200
X, y = make_circles(n_samples=n_samples, shuffle=False)
outer, inner = 0, 1
labels = -np.ones(n_samples)
labels[0] = outer
labels[-1] = inner
###############################################################################
# Learn with LabelSpreading
label_spread = label_propagation.LabelSpreading(kernel='knn', alpha=1.0)
label_spread.fit(X, labels)
###############################################################################
# Plot output labels
output_labels = label_spread.transduction_
plt.figure(figsize=(8.5, 4))
plt.subplot(1, 2, 1)
plt.scatter(X[labels == outer, 0], X[labels == outer, 1], color='navy',
marker='s', lw=0, label="outer labeled", s=10)
plt.scatter(X[labels == inner, 0], X[labels == inner, 1], color='c',
marker='s', lw=0, label='inner labeled', s=10)
plt.scatter(X[labels == -1, 0], X[labels == -1, 1], color='darkorange',
marker='.', label='unlabeled')
plt.legend(scatterpoints=1, shadow=False, loc='upper right')
plt.title("Raw data (2 classes=outer and inner)")
plt.subplot(1, 2, 2)
output_label_array = np.asarray(output_labels)
outer_numbers = np.where(output_label_array == outer)[0]
inner_numbers = np.where(output_label_array == inner)[0]
plt.scatter(X[outer_numbers, 0], X[outer_numbers, 1], color='navy',
marker='s', lw=0, s=10, label="outer learned")
plt.scatter(X[inner_numbers, 0], X[inner_numbers, 1], color='c',
marker='s', lw=0, s=10, label="inner learned")
plt.legend(scatterpoints=1, shadow=False, loc='upper right')
plt.title("Labels learned with Label Spreading (KNN)")
plt.subplots_adjust(left=0.07, bottom=0.07, right=0.93, top=0.92)
plt.show()
|
salguarnieri/intellij-community
|
refs/heads/master
|
python/testData/quickdoc/HoverOverClass.py
|
83
|
class A(object):
"Doc of A"
pass
class B(A):
"Doc of B"
pass
<the_ref>A
|
intk/bda.plone.molliepayment
|
refs/heads/master
|
src/bda/plone/molliepayment/__init__.py
|
1
|
from bda.plone.shop import message_factory as _
from zope import schema
from plone.supermodel import model
from zope.interface import Interface
from zope.interface import provider
from bda.plone.shop.interfaces import IShopSettingsProvider
#from zope.interface import Attribute
@provider(IShopSettingsProvider)
class IMolliePaymentSettings(model.Schema):
model.fieldset(
'mollie',
label=_(u'mollie', default=u'mollie'),
fields=[
'mollie_server_url',
'mollie_sha_in_password',
'mollie_sha_out_password',
],
)
mollie_server_url = schema.ASCIILine(
title=_(u'mollie_server_url', default=u'Server url'),
required=True,
)
mollie_sha_in_password = schema.ASCIILine(
title=_(u'mollie_sha_in_password', default=u'SHA in password'),
required=True,
)
mollie_sha_out_password = schema.ASCIILine(
title=_(u'mollie_sha_out_password', default=u'SHA out password'),
required=True,
)
|
DANCEcollaborative/forum-xblock
|
refs/heads/master
|
XBlock Integration Files/xdjangobb/xblock/lib/python2.7/site-packages/django/contrib/localflavor/mk/forms.py
|
89
|
from __future__ import absolute_import
import datetime
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import RegexField, Select
from django.utils.translation import ugettext_lazy as _
from django.contrib.localflavor.mk.mk_choices import MK_MUNICIPALITIES
class MKIdentityCardNumberField(RegexField):
"""
A Macedonian ID card number. Accepts both old and new format.
"""
default_error_messages = {
'invalid': _(u'Identity card numbers must contain'
' either 4 to 7 digits or an uppercase letter and 7 digits.'),
}
def __init__(self, *args, **kwargs):
kwargs['min_length'] = None
kwargs['max_length'] = 8
regex = ur'(^[A-Z]{1}\d{7}$)|(^\d{4,7}$)'
super(MKIdentityCardNumberField, self).__init__(regex, *args, **kwargs)
class MKMunicipalitySelect(Select):
"""
A form ``Select`` widget that uses a list of Macedonian municipalities as
choices. The label is the name of the municipality and the value
is a 2 character code for the municipality.
"""
def __init__(self, attrs=None):
super(MKMunicipalitySelect, self).__init__(attrs, choices = MK_MUNICIPALITIES)
class UMCNField(RegexField):
"""
A form field that validates input as a unique master citizen
number.
The format of the unique master citizen number is inherited from
Yugoslavia and is still in use in several other countries, not
solely in Macedonia. For more information see:
https://secure.wikimedia.org/wikipedia/en/wiki/Unique_Master_Citizen_Number
A value will pass validation if it complies to the following rules:
* Consists of exactly 13 digits
* The first 7 digits represent a valid past date in the format DDMMYYY
* The last digit of the UMCN passes a checksum test
"""
default_error_messages = {
'invalid': _(u'This field should contain exactly 13 digits.'),
'date': _(u'The first 7 digits of the UMCN must represent a valid past date.'),
'checksum': _(u'The UMCN is not valid.'),
}
def __init__(self, *args, **kwargs):
kwargs['min_length'] = None
kwargs['max_length'] = 13
super(UMCNField, self).__init__(r'^\d{13}$', *args, **kwargs)
def clean(self, value):
value = super(UMCNField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if not self._validate_date_part(value):
raise ValidationError(self.error_messages['date'])
if self._validate_checksum(value):
return value
else:
raise ValidationError(self.error_messages['checksum'])
def _validate_checksum(self, value):
a,b,c,d,e,f,g,h,i,j,k,l,K = [int(digit) for digit in value]
m = 11 - (( 7*(a+g) + 6*(b+h) + 5*(c+i) + 4*(d+j) + 3*(e+k) + 2*(f+l)) % 11)
if (m >= 1 and m <= 9) and K == m:
return True
elif m == 11 and K == 0:
return True
else:
return False
def _validate_date_part(self, value):
daypart, monthpart, yearpart = int(value[:2]), int(value[2:4]), int(value[4:7])
if yearpart >= 800:
yearpart += 1000
else:
yearpart += 2000
try:
date = datetime.datetime(year = yearpart, month = monthpart, day = daypart).date()
except ValueError:
return False
if date >= datetime.datetime.now().date():
return False
return True
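# --- Hedged usage sketch (added for illustration, not part of the original
# module). clean() returns the value when both the date part and the
# checksum validate, and raises ValidationError otherwise; the number
# below is hypothetical:
#
#   field = UMCNField()
#   field.clean(u'0101006500006')  # raises ValidationError if invalid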
|
llenfest/programingworkshop
|
refs/heads/master
|
Python/pytds/__init__.py
|
8
|
# use __init__.py to set up the package namespace
import util
# callers can then reach util through the package, e.g.:
# import pytds
# pytds.util
|
DANCEcollaborative/forum-xblock
|
refs/heads/master
|
XBlock Integration Files/xdjangobb/xblock/lib/python2.7/site-packages/django/db/models/options.py
|
92
|
import re
from bisect import bisect
from django.conf import settings
from django.db.models.related import RelatedObject
from django.db.models.fields.related import ManyToManyRel
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.proxy import OrderWrt
from django.db.models.loading import get_models, app_cache_ready
from django.utils.translation import activate, deactivate_all, get_language, string_concat
from django.utils.encoding import force_unicode, smart_str
from django.utils.datastructures import SortedDict
# Calculate the verbose_name by converting from InitialCaps to "lowercase with spaces".
get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', ' \\1', class_name).lower().strip()
DEFAULT_NAMES = ('verbose_name', 'verbose_name_plural', 'db_table', 'ordering',
'unique_together', 'permissions', 'get_latest_by',
'order_with_respect_to', 'app_label', 'db_tablespace',
'abstract', 'managed', 'proxy', 'auto_created')
class Options(object):
def __init__(self, meta, app_label=None):
self.local_fields, self.local_many_to_many = [], []
self.virtual_fields = []
self.module_name, self.verbose_name = None, None
self.verbose_name_plural = None
self.db_table = ''
self.ordering = []
self.unique_together = []
self.permissions = []
self.object_name, self.app_label = None, app_label
self.get_latest_by = None
self.order_with_respect_to = None
self.db_tablespace = settings.DEFAULT_TABLESPACE
self.admin = None
self.meta = meta
self.pk = None
self.has_auto_field, self.auto_field = False, None
self.abstract = False
self.managed = True
self.proxy = False
# For any class that is a proxy (including automatically created
# classes for deferred object loading), proxy_for_model tells us
# which class this model is proxying. Note that proxy_for_model
# can create a chain of proxy models. For non-proxy models, the
# variable is always None.
self.proxy_for_model = None
# For any non-abstract class, the concrete class is the model
# in the end of the proxy_for_model chain. In particular, for
# concrete models, the concrete_model is always the class itself.
self.concrete_model = None
self.parents = SortedDict()
self.duplicate_targets = {}
self.auto_created = False
# To handle various inheritance situations, we need to track where
# managers came from (concrete or abstract base classes).
self.abstract_managers = []
self.concrete_managers = []
# List of all lookups defined in ForeignKey 'limit_choices_to' options
# from *other* models. Needed for some admin checks. Internal use only.
self.related_fkey_lookups = []
def contribute_to_class(self, cls, name):
from django.db import connection
from django.db.backends.util import truncate_name
cls._meta = self
self.installed = re.sub(r'\.models$', '', cls.__module__) in settings.INSTALLED_APPS
# First, construct the default values for these options.
self.object_name = cls.__name__
self.module_name = self.object_name.lower()
self.verbose_name = get_verbose_name(self.object_name)
# Next, apply any overridden values from 'class Meta'.
if self.meta:
meta_attrs = self.meta.__dict__.copy()
for name in self.meta.__dict__:
# Ignore any private attributes that Django doesn't care about.
# NOTE: We can't modify a dictionary's contents while looping
# over it, so we loop over the *original* dictionary instead.
if name.startswith('_'):
del meta_attrs[name]
for attr_name in DEFAULT_NAMES:
if attr_name in meta_attrs:
setattr(self, attr_name, meta_attrs.pop(attr_name))
elif hasattr(self.meta, attr_name):
setattr(self, attr_name, getattr(self.meta, attr_name))
# unique_together can be either a tuple of tuples, or a single
# tuple of two strings. Normalize it to a tuple of tuples, so that
# calling code can uniformly expect that.
ut = meta_attrs.pop('unique_together', self.unique_together)
if ut and not isinstance(ut[0], (tuple, list)):
ut = (ut,)
self.unique_together = ut
# verbose_name_plural is a special case because it uses a 's'
# by default.
if self.verbose_name_plural is None:
self.verbose_name_plural = string_concat(self.verbose_name, 's')
# Any leftover attributes must be invalid.
if meta_attrs != {}:
raise TypeError("'class Meta' got invalid attribute(s): %s" % ','.join(meta_attrs.keys()))
else:
self.verbose_name_plural = string_concat(self.verbose_name, 's')
del self.meta
# If the db_table wasn't provided, use the app_label + module_name.
if not self.db_table:
self.db_table = "%s_%s" % (self.app_label, self.module_name)
self.db_table = truncate_name(self.db_table, connection.ops.max_name_length())
def _prepare(self, model):
if self.order_with_respect_to:
self.order_with_respect_to = self.get_field(self.order_with_respect_to)
self.ordering = ('_order',)
model.add_to_class('_order', OrderWrt())
else:
self.order_with_respect_to = None
if self.pk is None:
if self.parents:
# Promote the first parent link in lieu of adding yet another
# field.
field = self.parents.value_for_index(0)
# Look for a local field with the same name as the
# first parent link. If a local field has already been
# created, use it instead of promoting the parent
already_created = [fld for fld in self.local_fields if fld.name == field.name]
if already_created:
field = already_created[0]
field.primary_key = True
self.setup_pk(field)
else:
auto = AutoField(verbose_name='ID', primary_key=True,
auto_created=True)
model.add_to_class('id', auto)
# Determine any sets of fields that are pointing to the same targets
# (e.g. two ForeignKeys to the same remote model). The query
# construction code needs to know this. At the end of this,
# self.duplicate_targets will map each duplicate field column to the
# columns it duplicates.
collections = {}
for column, target in self.duplicate_targets.iteritems():
try:
collections[target].add(column)
except KeyError:
collections[target] = set([column])
self.duplicate_targets = {}
for elt in collections.itervalues():
if len(elt) == 1:
continue
for column in elt:
self.duplicate_targets[column] = elt.difference(set([column]))
def add_field(self, field):
# Insert the given field in the order in which it was created, using
# the "creation_counter" attribute of the field.
# Move many-to-many related fields from self.fields into
# self.many_to_many.
if field.rel and isinstance(field.rel, ManyToManyRel):
self.local_many_to_many.insert(bisect(self.local_many_to_many, field), field)
if hasattr(self, '_m2m_cache'):
del self._m2m_cache
else:
self.local_fields.insert(bisect(self.local_fields, field), field)
self.setup_pk(field)
if hasattr(self, '_field_cache'):
del self._field_cache
del self._field_name_cache
if hasattr(self, '_name_map'):
del self._name_map
def add_virtual_field(self, field):
self.virtual_fields.append(field)
def setup_pk(self, field):
if not self.pk and field.primary_key:
self.pk = field
field.serialize = False
def setup_proxy(self, target):
"""
Does the internal setup so that the current model is a proxy for
"target".
"""
self.pk = target._meta.pk
self.proxy_for_model = target
self.db_table = target._meta.db_table
def __repr__(self):
return '<Options for %s>' % self.object_name
def __str__(self):
return "%s.%s" % (smart_str(self.app_label), smart_str(self.module_name))
def verbose_name_raw(self):
"""
There are a few places where the untranslated verbose name is needed
(so that we get the same value regardless of currently active
locale).
"""
lang = get_language()
deactivate_all()
raw = force_unicode(self.verbose_name)
activate(lang)
return raw
verbose_name_raw = property(verbose_name_raw)
def _fields(self):
"""
The getter for self.fields. This returns the list of field objects
available to this model (including through parent models).
Callers are not permitted to modify this list, since it's a reference
to this instance (not a copy).
"""
try:
self._field_name_cache
except AttributeError:
self._fill_fields_cache()
return self._field_name_cache
fields = property(_fields)
def get_fields_with_model(self):
"""
Returns a sequence of (field, model) pairs for all fields. The "model"
element is None for fields on the current model. Mostly of use when
constructing queries so that we know which model a field belongs to.
"""
try:
self._field_cache
except AttributeError:
self._fill_fields_cache()
return self._field_cache
def _fill_fields_cache(self):
cache = []
for parent in self.parents:
for field, model in parent._meta.get_fields_with_model():
if model:
cache.append((field, model))
else:
cache.append((field, parent))
cache.extend([(f, None) for f in self.local_fields])
self._field_cache = tuple(cache)
self._field_name_cache = [x for x, _ in cache]
def _many_to_many(self):
try:
self._m2m_cache
except AttributeError:
self._fill_m2m_cache()
return self._m2m_cache.keys()
many_to_many = property(_many_to_many)
def get_m2m_with_model(self):
"""
The many-to-many version of get_fields_with_model().
"""
try:
self._m2m_cache
except AttributeError:
self._fill_m2m_cache()
return self._m2m_cache.items()
def _fill_m2m_cache(self):
cache = SortedDict()
for parent in self.parents:
for field, model in parent._meta.get_m2m_with_model():
if model:
cache[field] = model
else:
cache[field] = parent
for field in self.local_many_to_many:
cache[field] = None
self._m2m_cache = cache
def get_field(self, name, many_to_many=True):
"""
Returns the requested field by name. Raises FieldDoesNotExist on error.
"""
to_search = many_to_many and (self.fields + self.many_to_many) or self.fields
for f in to_search:
if f.name == name:
return f
raise FieldDoesNotExist('%s has no field named %r' % (self.object_name, name))
def get_field_by_name(self, name):
"""
Returns the (field_object, model, direct, m2m), where field_object is
the Field instance for the given name, model is the model containing
this field (None for local fields), direct is True if the field exists
on this model, and m2m is True for many-to-many relations. When
'direct' is False, 'field_object' is the corresponding RelatedObject
for this field (since the field doesn't have an instance associated
with it).
Uses a cache internally, so after the first access, this is very fast.
"""
try:
try:
return self._name_map[name]
except AttributeError:
cache = self.init_name_map()
return cache[name]
except KeyError:
raise FieldDoesNotExist('%s has no field named %r'
% (self.object_name, name))
def get_all_field_names(self):
"""
Returns a list of all field names that are possible for this model
(including reverse relation names). This is used for pretty printing
debugging output (a list of choices), so any internal-only field names
are not included.
"""
try:
cache = self._name_map
except AttributeError:
cache = self.init_name_map()
names = cache.keys()
names.sort()
# Internal-only names end with "+" (symmetrical m2m related names being
# the main example). Trim them.
return [val for val in names if not val.endswith('+')]
def init_name_map(self):
"""
Initialises the field name -> field object mapping.
"""
cache = {}
# We intentionally handle related m2m objects first so that symmetrical
# m2m accessor names can be overridden, if necessary.
for f, model in self.get_all_related_m2m_objects_with_model():
cache[f.field.related_query_name()] = (f, model, False, True)
for f, model in self.get_all_related_objects_with_model():
cache[f.field.related_query_name()] = (f, model, False, False)
for f, model in self.get_m2m_with_model():
cache[f.name] = (f, model, True, True)
for f, model in self.get_fields_with_model():
cache[f.name] = (f, model, True, False)
if app_cache_ready():
self._name_map = cache
return cache
def get_add_permission(self):
return 'add_%s' % self.object_name.lower()
def get_change_permission(self):
return 'change_%s' % self.object_name.lower()
def get_delete_permission(self):
return 'delete_%s' % self.object_name.lower()
def get_all_related_objects(self, local_only=False, include_hidden=False,
include_proxy_eq=False):
return [k for k, v in self.get_all_related_objects_with_model(
local_only=local_only, include_hidden=include_hidden,
include_proxy_eq=include_proxy_eq)]
def get_all_related_objects_with_model(self, local_only=False,
include_hidden=False,
include_proxy_eq=False):
"""
Returns a list of (related-object, model) pairs. Similar to
get_fields_with_model().
"""
try:
self._related_objects_cache
except AttributeError:
self._fill_related_objects_cache()
predicates = []
if local_only:
predicates.append(lambda k, v: not v)
if not include_hidden:
predicates.append(lambda k, v: not k.field.rel.is_hidden())
cache = (self._related_objects_proxy_cache if include_proxy_eq
else self._related_objects_cache)
return filter(lambda t: all([p(*t) for p in predicates]), cache.items())
def _fill_related_objects_cache(self):
cache = SortedDict()
parent_list = self.get_parent_list()
for parent in self.parents:
for obj, model in parent._meta.get_all_related_objects_with_model(include_hidden=True):
if (obj.field.creation_counter < 0 or obj.field.rel.parent_link) and obj.model not in parent_list:
continue
if not model:
cache[obj] = parent
else:
cache[obj] = model
# Collect also objects which are in relation to some proxy child/parent of self.
proxy_cache = cache.copy()
for klass in get_models(include_auto_created=True, only_installed=False):
for f in klass._meta.local_fields:
if f.rel and not isinstance(f.rel.to, basestring):
if self == f.rel.to._meta:
cache[RelatedObject(f.rel.to, klass, f)] = None
proxy_cache[RelatedObject(f.rel.to, klass, f)] = None
elif self.concrete_model == f.rel.to._meta.concrete_model:
proxy_cache[RelatedObject(f.rel.to, klass, f)] = None
self._related_objects_cache = cache
self._related_objects_proxy_cache = proxy_cache
def get_all_related_many_to_many_objects(self, local_only=False):
try:
cache = self._related_many_to_many_cache
except AttributeError:
cache = self._fill_related_many_to_many_cache()
if local_only:
return [k for k, v in cache.items() if not v]
return cache.keys()
def get_all_related_m2m_objects_with_model(self):
"""
Returns a list of (related-m2m-object, model) pairs. Similar to
get_fields_with_model().
"""
try:
cache = self._related_many_to_many_cache
except AttributeError:
cache = self._fill_related_many_to_many_cache()
return cache.items()
def _fill_related_many_to_many_cache(self):
cache = SortedDict()
parent_list = self.get_parent_list()
for parent in self.parents:
for obj, model in parent._meta.get_all_related_m2m_objects_with_model():
if obj.field.creation_counter < 0 and obj.model not in parent_list:
continue
if not model:
cache[obj] = parent
else:
cache[obj] = model
for klass in get_models(only_installed=False):
for f in klass._meta.local_many_to_many:
if f.rel and not isinstance(f.rel.to, basestring) and self == f.rel.to._meta:
cache[RelatedObject(f.rel.to, klass, f)] = None
if app_cache_ready():
self._related_many_to_many_cache = cache
return cache
def get_base_chain(self, model):
"""
Returns a list of parent classes leading to 'model' (ordered from closest
to most distant ancestor). This has to handle the case where 'model' is
a grandparent or even more distant relation.
"""
if not self.parents:
return
if model in self.parents:
return [model]
for parent in self.parents:
res = parent._meta.get_base_chain(model)
if res:
res.insert(0, parent)
return res
raise TypeError('%r is not an ancestor of this model'
% model._meta.module_name)
def get_parent_list(self):
"""
Returns a set of all the ancestors of this model. Useful for
determining if something is an ancestor, regardless of lineage.
"""
result = set()
for parent in self.parents:
result.add(parent)
result.update(parent._meta.get_parent_list())
return result
def get_ancestor_link(self, ancestor):
"""
Returns the field on the current model which points to the given
"ancestor". This is possible an indirect link (a pointer to a parent
model, which points, eventually, to the ancestor). Used when
constructing table joins for model inheritance.
Returns None if the model isn't an ancestor of this one.
"""
if ancestor in self.parents:
return self.parents[ancestor]
for parent in self.parents:
# Tries to get a link field from the immediate parent
parent_link = parent._meta.get_ancestor_link(ancestor)
if parent_link:
# In case of a proxied model, the first link
# of the chain to the ancestor is that parent's
# link
return self.parents[parent] or parent_link
def get_ordered_objects(self):
"Returns a list of Options objects that are ordered with respect to this object."
if not hasattr(self, '_ordered_objects'):
objects = []
# TODO
#for klass in get_models(get_app(self.app_label)):
# opts = klass._meta
# if opts.order_with_respect_to and opts.order_with_respect_to.rel \
# and self == opts.order_with_respect_to.rel.to._meta:
# objects.append(opts)
self._ordered_objects = objects
return self._ordered_objects
def pk_index(self):
"""
Returns the index of the primary key field in the self.fields list.
"""
return self.fields.index(self.pk)
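# --- Hedged usage sketch (added for illustration, not part of the original
# module). Models expose their Options instance as `_meta`; the model used
# here is illustrative:
#
#   from django.contrib.auth.models import User
#   opts = User._meta
#   opts.get_all_field_names()                         # all queryable names
#   f, model, direct, m2m = opts.get_field_by_name('username')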
|
npe9/emacs
|
refs/heads/master
|
test/automated/data/package/package-test-server.py
|
24
|
import sys
import BaseHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
HandlerClass = SimpleHTTPRequestHandler
ServerClass = BaseHTTPServer.HTTPServer
Protocol = "HTTP/1.0"
if sys.argv[1:]:
port = int(sys.argv[1])
else:
port = 8000
server_address = ('127.0.0.1', port)
HandlerClass.protocol_version = Protocol
httpd = ServerClass(server_address, HandlerClass)
sa = httpd.socket.getsockname()
print "Serving HTTP on", sa[0], "port", sa[1], "..."
httpd.serve_forever()
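# --- Hedged usage note (added for illustration, not part of the original
# script). Run with an optional port argument, then point a client at it:
#
#   python package-test-server.py 8000
#   curl http://127.0.0.1:8000/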
|
173210/Monarudo_GPU_M7
|
refs/heads/4.4.2_dlxj
|
Documentation/target/tcm_mod_builder.py
|
4981
|
#!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var):
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
try:
os.mkdir(fabric_mod_dir_var)
except OSError as e:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var + ": " + str(e))
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
	process_fo = 0
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
			process_fo = 1
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('get_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
if re.search('set_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
p.close()
	try:
		pi.write(bufi)
	except IOError:
		tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
	tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
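# Example invocation from inside the kernel tree (module name and protocol
# are illustrative):
#
#   python tcm_mod_builder.py -m tcm_nab5000 -p iSCSI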
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
|
is210-faculty/is210-week-12-synthesizing
|
refs/heads/master
|
tests/test_smoke.py
|
245
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Smoke test for test suite."""
# Import Python libs
import unittest
class SmokeTestCase(unittest.TestCase):
"""Test cases to ensure that the test suite is operational."""
def test_true(self):
"""Tests that True is True."""
self.assertTrue(True)
if __name__ == '__main__':
unittest.main()
|
polyrabbit/polyglot
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup, find_packages
# see https://github.com/GaretJax/i18n-utils/blob/master/setup.py
# and https://github.com/elastic/curator/blob/master/setup.py
setup(
name='polyglot',
version='0.1',
url='https://github.com/polyrabbit/polyglot',
license='MIT',
author='poly',
author_email='mcx_221@foxmail.com',
description='A computer language savant',
packages=find_packages(exclude=['tests']),
include_package_data=True,
platforms='any',
    install_requires=open('./requirements.txt').read().splitlines(),
setup_requires=['nose'],
test_suite='nose.collector',
tests_require=['nose'],
entry_points={
"console_scripts": ["polyglot=polyglot.cli:run"]
}
)
|
burrowsa/initdotpy
|
refs/heads/master
|
initdotpy/__init__.py
|
1
|
"""The initdotpy package makes it simple to write __init__.py files that automatically include the package contents.
For example if you have an __init__.py that looks like::
import submodule1
import submodule2
import submodule3
import subpackage1
import subpackage2
import subpackage3
You can replace it with::
from initdotpy import auto_import
auto_import()
and it will automatically import all the modules/packages contained in the package and stay up to date when you make changes to the package contents.
Or if you prefer to import the contents of the submodules/subpackages, e.g.::
from submodule1 import *
from submodule2 import *
from submodule3 import *
from subpackage1 import *
from subpackage2 import *
from subpackage3 import *
You can just write your __init__.py as::
from initdotpy import auto_import_contents
auto_import_contents()
Again this __init__.py automatically stays up to date so you need never edit it again."""
import os
import sys
import pkgutil
import inspect
__all__ = ['auto_import', "auto_import_contents"]
def auto_import(exclude=tuple()):
"""If you have an __init__.py that looks like::
import submodule1
import submodule2
import submodule3
import subpackage1
import subpackage2
import subpackage3
You can replace it with::
from initdotpy import auto_import
auto_import()
and it will automatically import all the modules/packages contained in the package and stay up to date when you make changes to the package contents."""
def add_child_to_parent(parent_module, child, child_module):
setattr(parent_module, child, child_module)
parent_module.__all__.append(child)
_auto_import_impl(add_child_to_parent, False, exclude, auto_import)
def auto_import_contents(exclude=tuple()):
"""If you have an __init__.py that looks like::
from submodule1 import *
from submodule2 import *
from submodule3 import *
from subpackage1 import *
from subpackage2 import *
from subpackage3 import *
You can just write your __init__.py as::
from initdotpy import auto_import_contents
auto_import_contents()
In this case every submodule/subpackage must have an __all__ defined and there must not be duplicate definitions of
the same name. It will automatically import the contents from all the modules/packages contained in the package
and stay up to date when you make changes to the package contents."""
def add_child_contents_to_parent(parent_module, child, child_module):
if not hasattr(child_module, '__all__'):
raise RuntimeError("Module or package %s does not define __all__" % child)
duplicates = set(child_module.__all__).intersection(parent_module.__all__)
if duplicates:
raise RuntimeError("The following names, defined in %s, are already defined elsewhere: %s"
% (child, duplicates))
else:
for name in child_module.__all__:
setattr(parent_module, name, getattr(child_module, name))
parent_module.__all__.append(name)
_auto_import_impl(add_child_contents_to_parent, True, exclude, auto_import_contents)
def _auto_import_impl(func, import_contents, exclude, item_for_removal):
"""Implements auto_import and auto_import_contents"""
parent_module = inspect.getmodule(inspect.stack()[2][0])
if not hasattr(parent_module, '__all__'):
parent_module.__all__ = []
for module_loader, child, _ in pkgutil.iter_modules([os.path.dirname(parent_module.__file__)]):
if child not in exclude:
full_child_name = parent_module.__name__ + '.' + child
if full_child_name in sys.modules:
child_module = sys.modules[full_child_name]
else:
child_module = module_loader.find_module(full_child_name).load_module(full_child_name)
func(parent_module, child, child_module)
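    # Finally, drop the auto_import()/auto_import_contents() function itself
    # from the calling package's namespace so it is not re-exported as a
    # package attribute.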
for attr_name in dir(parent_module):
attr_value = getattr(parent_module, attr_name)
if attr_value is item_for_removal and attr_name not in parent_module.__all__:
delattr(parent_module, attr_name)
|
yufengg/tensorflow
|
refs/heads/master
|
tensorflow/python/ops/gradients_test.py
|
41
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.gradients."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import warnings
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_ops
from tensorflow.python.framework import test_util
from tensorflow.python.framework.constant_op import constant
from tensorflow.python.ops import array_grad # pylint: disable=unused-import
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_grad # pylint: disable=unused-import
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_grad # pylint: disable=unused-import
from tensorflow.python.ops import data_flow_ops # pylint: disable=unused-import
from tensorflow.python.ops import functional_ops # pylint: disable=unused-import
from tensorflow.python.ops import gradients
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_grad # pylint: disable=unused-import
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_grad # pylint: disable=unused-import
from tensorflow.python.ops import state_grad # pylint: disable=unused-import
from tensorflow.python.ops import tensor_array_grad # pylint: disable=unused-import
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops.nn_ops import bias_add
from tensorflow.python.platform import googletest
def _OpsBetween(graph, to_ops, from_ops):
"""Build the list of operations between two lists of Operations.
Args:
graph: a Graph.
to_ops: list of Operations.
from_ops: list of Operations.
Returns:
The list of operations between "from_ops" and "to_ops", sorted by
decreasing operation id. This list contains all elements of to_ops.
TODO(touts): Think about returning an empty list if from_ops are not
reachable from to_ops. Presently it returns to_ops in that case.
"""
# List of booleans, indexed by operation id, indicating if
# an op is reached from the output of "input_ops".
reached_ops = [False] * (graph._last_id + 1)
# We only care to reach up to "output_ops" so we mark the
# output ops as reached to avoid recursing past them.
for op in to_ops:
reached_ops[op._id] = True
gradients_impl._MarkReachedOps(from_ops, reached_ops)
between_ops = gradients_impl._GatherInputs(to_ops, reached_ops)
between_ops.sort(key=lambda x: -x._id)
return between_ops
class GradientsTest(test_util.TensorFlowTestCase):
def _OpNames(self, op_list):
return ["%s/%d" % (str(op.name), op._id) for op in op_list]
def _assertOpListEqual(self, ops1, ops2):
self.assertEquals(self._OpNames(ops1), self._OpNames(ops2))
def testOpsBetweenSimple(self):
with ops.Graph().as_default() as g:
t1 = constant(1.0)
t2 = constant(2.0)
t3 = array_ops.stack([t1, t2])
# Full graph
self._assertOpListEqual([t3.op, t2.op, t1.op],
_OpsBetween(g, [t3.op], [t1.op, t2.op]))
# Only t1, t3.
self._assertOpListEqual([t3.op, t1.op], _OpsBetween(g, [t3.op], [t1.op]))
def testOpsBetweenUnreachable(self):
with ops.Graph().as_default() as g:
t1 = constant(1.0)
t2 = constant(2.0)
_ = array_ops.stack([t1, t2])
t4 = constant(1.0)
t5 = constant(2.0)
t6 = array_ops.stack([t4, t5])
# Elements of to_ops are always listed.
self._assertOpListEqual([t6.op], _OpsBetween(g, [t6.op], [t1.op]))
def testOpsBetweenCut(self):
with ops.Graph().as_default() as g:
t1 = constant(1.0)
t2 = constant(2.0)
t3 = array_ops.stack([t1, t2])
t4 = constant([1.0])
t5 = array_ops.concat([t4, t3], 0)
t6 = constant([2.0])
t7 = array_ops.concat([t5, t6], 0)
self._assertOpListEqual([t7.op, t5.op, t4.op],
_OpsBetween(g, [t7.op], [t4.op]))
def testOpsBetweenCycle(self):
with ops.Graph().as_default() as g:
t1 = constant(1.0)
t2 = constant(2.0)
t3 = array_ops.stack([t1, t2])
t4 = array_ops.concat([t3, t3, t3], 0)
t5 = constant([1.0])
t6 = array_ops.concat([t4, t5], 0)
t7 = array_ops.concat([t6, t3], 0)
self._assertOpListEqual([t6.op, t4.op, t3.op],
_OpsBetween(g, [t6.op], [t3.op]))
self._assertOpListEqual([t7.op, t6.op, t5.op, t4.op, t3.op, t1.op],
_OpsBetween(g, [t7.op], [t1.op, t5.op]))
self._assertOpListEqual([t6.op, t5.op, t4.op, t3.op, t2.op],
_OpsBetween(g, [t6.op], [t2.op, t5.op]))
def testGradients(self):
with ops.Graph().as_default():
inp = constant(1.0, shape=[32, 100], name="in")
w = constant(1.0, shape=[100, 10], name="w")
b = constant(1.0, shape=[10], name="b")
xw = math_ops.matmul(inp, w, name="xw")
h = bias_add(xw, b, name="h")
w_grad = gradients.gradients(h, w)[0]
self.assertEquals("MatMul", w_grad.op.type)
self.assertEquals(w_grad.op._original_op, xw.op)
self.assertTrue(w_grad.op.get_attr("transpose_a"))
self.assertFalse(w_grad.op.get_attr("transpose_b"))
def testUnusedOutput(self):
with ops.Graph().as_default():
w = constant(1.0, shape=[2, 2])
x = constant(1.0, shape=[2, 2])
wx = math_ops.matmul(w, x)
split_wx = array_ops.split(value=wx, num_or_size_splits=2, axis=0)
c = math_ops.reduce_sum(split_wx[1])
gw = gradients.gradients(c, [w])[0]
self.assertEquals("MatMul", gw.op.type)
def testColocateGradients(self):
with ops.Graph().as_default() as g:
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
with g.device("/gpu:0"):
wx = math_ops.matmul(w, x)
gw = gradients.gradients(wx, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw.op.colocation_groups(), wx.op.colocation_groups())
def testColocateGradientsWithAggregation(self):
with ops.Graph().as_default() as g:
with g.device("/gpu:1"):
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
y = constant(1.0, shape=[1, 2])
wx = math_ops.matmul(w, x)
wy = math_ops.matmul(w, y)
with g.device("/gpu:0"):
z = wx + wy
gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw1.op.colocation_groups(), wx.op.colocation_groups())
gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
self.assertTrue(wx.op.colocation_groups() != gw2.op.colocation_groups())
def testColocateGradientsWithAggregationInMultipleDevices(self):
with ops.Graph().as_default() as g:
with g.device("/gpu:1"):
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
y = constant(1.0, shape=[1, 2])
with g.device("/task:1"):
wx = math_ops.matmul(w, x)
with g.device("/task:2"):
wy = math_ops.matmul(w, y)
with g.device("/gpu:0"):
z = wx + wy
gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw1.op.colocation_groups(), w.op.colocation_groups())
gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
self.assertTrue(w.op.colocation_groups() != gw2.op.colocation_groups())
def testBoundaryStop(self):
# Test that we don't differentiate 'x'. The gradient function for 'x' is
# set explicitly to None so we will get an exception if the gradient code
# tries to differentiate 'x'.
with ops.Graph().as_default():
c = constant(1.0)
x = array_ops.identity(c)
y = x + 1.0
z = y + 1
grads = gradients.gradients(z, [x])
self.assertTrue(all(x is not None for x in grads))
def testBoundaryContinue(self):
# Test that we differentiate both 'x' and 'y' correctly when x is a
# predecessor of y.
with self.test_session():
x = constant(1.0)
y = x * 2.0
z = y * 3.0
grads = gradients.gradients(z, [x, y])
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(6.0, grads[0].eval())
def testAggregationMethodAccumulateN(self):
with self.test_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
grads = gradients.gradients(
z, [x, y],
aggregation_method=gradients.AggregationMethod.
EXPERIMENTAL_ACCUMULATE_N)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
def testAggregationMethodAddN(self):
with self.test_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
grads = gradients.gradients(
z, [x, y], aggregation_method=gradients.AggregationMethod.ADD_N)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
def testAggregationMethodTree(self):
with self.test_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
grads = gradients.gradients(
z, [x, y],
aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_TREE)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
def testNoGradientForStringOutputs(self):
with ops.Graph().as_default():
def _TestOpGrad(_, float_grad, string_grad):
"""Gradient function for TestStringOutput."""
self.assertEquals(float_grad.dtype, dtypes.float32)
self.assertFalse(string_grad)
return float_grad
ops.RegisterGradient("TestStringOutput")(_TestOpGrad)
c = constant(1.0)
x, _ = test_ops.test_string_output(c)
z = x * 2.0
w = z * 3.0
grads = gradients.gradients(z, [c])
self.assertTrue(isinstance(grads[0], ops.Tensor))
grads = gradients.gradients(w, [c])
self.assertTrue(isinstance(grads[0], ops.Tensor))
def testSingletonIndexedSlices(self):
with ops.Graph().as_default():
x = array_ops.placeholder(dtypes.float32)
y = array_ops.identity(x)
dy = ops.IndexedSlices(
array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32))
dx, = gradients.gradients(y, x, grad_ys=dy)
# The gradient of tf.identity should pass the value through unchanged.
# A previous version of the code did this only for tf.Tensor, not
# tf.IndexedSlices.
self.assertEqual(dx, dy)
def testNonDifferentiableSwitchInWhileLoop(self):
with ops.Graph().as_default():
v = array_ops.placeholder(dtypes.float32, [])
def _Step(i, a, ta):
a += math_ops.cast(v, dtypes.int32)
return (i + 1, a, ta.write(i, a))
n = 4
i, _, ta = control_flow_ops.while_loop(
lambda i, *_: i < n,
_Step, [0, 0, tensor_array_ops.TensorArray(
dtypes.int32, size=n)])
target = ta.read(i - 1)
grad, = gradients.gradients(target, v)
self.assertIsNone(grad)
def testVariableReadValueGradient(self):
with ops.Graph().as_default():
init = constant_op.constant(100.0)
var = variables.Variable(init)
gradient = gradients.gradients(var.read_value(), var)
self.assertIsNotNone(gradient)
def testVariableAsGraphElementGradient(self):
with ops.Graph().as_default() as graph:
init = constant_op.constant(100.0)
var = variables.Variable(init)
gradient = gradients.gradients(graph.as_graph_element(var), var)
self.assertIsNotNone(gradient)
def testVariableRefGradient(self):
with ops.Graph().as_default():
init = constant_op.constant(100.0)
var = variables.Variable(init)
gradient = gradients.gradients(var._ref(), var)
self.assertIsNotNone(gradient)
def testDependentYs(self):
with self.test_session():
x = constant_op.constant(3.0)
y = math_ops.square(x)
y1 = math_ops.square(y)
y2 = math_ops.square(y1)
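      # With x = 3: y = x**2 and y2 = x**8, so d(y + y2)/dx = 2*x + 8*x**7
      # = 6 + 17496 = 17502, which is the value checked below.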
g = gradients.gradients([y, y2], x)
self.assertAllClose(17502.0, g[0].eval())
g = gradients.gradients(y + y2, x)
self.assertAllClose(17502.0, g[0].eval())
z = array_ops.identity(y)
z2 = array_ops.identity(y2)
g = gradients.gradients([z, z2], x)
self.assertAllClose(17502.0, g[0].eval())
class FunctionGradientsTest(test_util.TensorFlowTestCase):
@classmethod
def XSquarePlusB(cls, x, b):
return x * x + b
@classmethod
def XSquarePlusBGradient(cls, x, b, g):
# Perturb gradients (multiply by 2), so we can test that this was called.
g *= 2.0
return g * 2.0 * x, g
@classmethod
def _PythonGradient(cls, op, grad):
# Perturb gradients (multiply by 3), so we can test that this was called.
grad *= 3.0
return grad * op.inputs[0] * 2.0, grad
@classmethod
def _GetFunc(cls, **kwargs):
return function.Defun(dtypes.float32, dtypes.float32, **
kwargs)(cls.XSquarePlusB)
def _GetFuncGradients(self, f, x_value, b_value):
x = constant_op.constant(x_value, name="x")
b = constant_op.constant(b_value, name="b")
y = f(x, b)
grads = gradients.gradients(y, [x, b])
with self.test_session() as sess:
return sess.run(grads)
def testFunctionGradientsBasic(self):
g = ops.Graph()
with g.as_default():
f = self._GetFunc()
# Get gradients (should add SymbolicGradient node for function).
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0], grads[0])
self.assertAllEqual([1.0], grads[1])
def testFunctionGradientsComposition(self):
with ops.Graph().as_default():
f = self._GetFunc()
x = constant_op.constant([2.0], name="x")
b1 = constant_op.constant([1.0], name="b1")
b2 = constant_op.constant([1.0], name="b2")
y = f(f(x, b1), b2)
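      # f(x, b) = x*x + b, so y = (x**2 + b1)**2 + b2. At x = 2, b1 = 1 the
      # inner value is 5, giving dy/dx = 2*5*(2*x) = 40 and dy/db1 = 2*5 = 10.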
# Build gradient graph (should add SymbolicGradient node for function).
grads = gradients.gradients(y, [x, b1])
with self.test_session() as sess:
self.assertAllEqual([40.0], sess.run(grads)[0])
self.assertAllEqual([10.0], sess.run(grads)[1])
def testFunctionGradientsWithGradFunc(self):
g = ops.Graph()
with g.as_default():
grad_func = function.Defun(dtypes.float32, dtypes.float32,
dtypes.float32)(self.XSquarePlusBGradient)
f = self._GetFunc(grad_func=grad_func)
# Get gradients (should add SymbolicGradient node for function, which
# uses the grad_func above, which multiplies all gradients by 2).
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0 * 2], grads[0])
self.assertAllEqual([1.0 * 2], grads[1])
def testFunctionGradientWithRegistration(self):
g = ops.Graph()
with g.as_default():
f = self._GetFunc(python_grad_func=self._PythonGradient)
# Get gradients, using the python gradient function. It multiplies the
# gradients by 3.
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0 * 3], grads[0])
self.assertAllEqual([1.0 * 3], grads[1])
def testFunctionGradientWithGradFuncAndRegistration(self):
g = ops.Graph()
with g.as_default():
grad_func = function.Defun(dtypes.float32, dtypes.float32,
dtypes.float32)(self.XSquarePlusBGradient)
with self.assertRaisesRegexp(ValueError, "Gradient defined twice"):
f = self._GetFunc(
grad_func=grad_func, python_grad_func=self._PythonGradient)
f.add_to_graph(ops.Graph())
class StopGradientTest(test_util.TensorFlowTestCase):
def testStopGradient(self):
with ops.Graph().as_default():
inp = constant(1.0, shape=[100, 32], name="in")
out = array_ops.stop_gradient(inp)
igrad = gradients.gradients(out, inp)[0]
assert igrad is None
class PreventGradientTest(test_util.TensorFlowTestCase):
def testPreventGradient(self):
with ops.Graph().as_default():
inp = constant(1.0, shape=[100, 32], name="in")
out = array_ops.prevent_gradient(inp)
with self.assertRaisesRegexp(LookupError, "explicitly disabled"):
_ = gradients.gradients(out, inp)
class HessianVectorProductTest(test_util.TensorFlowTestCase):
def testHessianVectorProduct(self):
# Manually compute the Hessian explicitly for a low-dimensional problem
# and check that HessianVectorProduct matches multiplication by the
# explicit Hessian.
# Specifically, the Hessian of f(x) = x^T A x is
# H = A + A^T.
# We expect HessianVectorProduct(f(x), x, v) to be H v.
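    # Derivation: for f(x) = x^T A x the gradient is (A + A^T) x, so the
    # Hessian is H = A + A^T and the product computed below must equal H v.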
m = 4
rng = np.random.RandomState([1, 2, 3])
mat_value = rng.randn(m, m).astype("float32")
v_value = rng.randn(m, 1).astype("float32")
x_value = rng.randn(m, 1).astype("float32")
hess_value = mat_value + mat_value.T
hess_v_value = np.dot(hess_value, v_value)
for use_gpu in [False, True]:
with self.test_session(use_gpu=use_gpu):
mat = constant_op.constant(mat_value)
v = constant_op.constant(v_value)
x = constant_op.constant(x_value)
mat_x = math_ops.matmul(mat, x, name="Ax")
x_mat_x = math_ops.matmul(array_ops.transpose(x), mat_x, name="xAx")
hess_v = gradients_impl._hessian_vector_product(x_mat_x, [x], [v])[0]
hess_v_actual = hess_v.eval()
self.assertAllClose(hess_v_value, hess_v_actual)
class HessianTest(test_util.TensorFlowTestCase):
def testHessian1D(self):
# Manually compute the Hessian explicitly for a low-dimensional problem
# and check that `hessian` matches. Specifically, the Hessian of
# f(x) = x^T A x is H = A + A^T.
m = 4
rng = np.random.RandomState([1, 2, 3])
mat_value = rng.randn(m, m).astype("float32")
x_value = rng.randn(m).astype("float32")
hess_value = mat_value + mat_value.T
with self.test_session(use_gpu=True):
mat = constant_op.constant(mat_value)
x = constant_op.constant(x_value)
x_mat_x = math_ops.reduce_sum(x[:, None] * mat * x[None, :])
hess = gradients.hessians(x_mat_x, x)[0]
hess_actual = hess.eval()
self.assertAllClose(hess_value, hess_actual)
def testHessian1D_multi(self):
# Test the computation of the hessian with respect to multiple tensors
m = 4
n = 3
rng = np.random.RandomState([1, 2, 3])
mat_values = [rng.randn(m, m).astype("float32") for _ in range(n)]
x_values = [rng.randn(m).astype("float32") for _ in range(n)]
hess_values = [mat_value + mat_value.T for mat_value in mat_values]
with self.test_session(use_gpu=True):
mats = [constant_op.constant(mat_value) for mat_value in mat_values]
xs = [constant_op.constant(x_value) for x_value in x_values]
xs_mats_xs = [
math_ops.reduce_sum(x[:, None] * mat * x[None, :])
for x, mat in zip(xs, mats)
]
hessians = gradients.hessians(xs_mats_xs, xs)
hessians_actual = [hess.eval() for hess in hessians]
for hess_value, hess_actual in zip(hess_values, hessians_actual):
self.assertAllClose(hess_value, hess_actual)
def testHessianInvalidDimension(self):
for shape in [(10, 10), None]:
with self.test_session(use_gpu=True):
x = array_ops.placeholder(dtypes.float32, shape)
# Expect a ValueError because the dimensions are wrong
with self.assertRaises(ValueError):
gradients.hessians(x, x)
class IndexedSlicesToTensorTest(test_util.TensorFlowTestCase):
def testIndexedSlicesToTensor(self):
with self.test_session():
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
self.assertAllEqual(np_val.shape, c_sparse.dense_shape.eval())
c_dense = math_ops.multiply(c_sparse, 1.0)
self.assertAllClose(np_val, c_dense.eval())
def testIndexedSlicesToTensorList(self):
with self.test_session():
numpy_list = []
dense_list = []
sparse_list = []
for _ in range(3):
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
numpy_list.append(np_val)
dense_list.append(c)
sparse_list.append(c_sparse)
packed_dense = array_ops.stack(dense_list)
packed_sparse = array_ops.stack(sparse_list)
self.assertAllClose(packed_dense.eval(), packed_sparse.eval())
def testInt64Indices(self):
with self.test_session():
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
c_sparse = ops.IndexedSlices(
c_sparse.values,
math_ops.cast(c_sparse.indices, dtypes.int64), c_sparse.dense_shape)
self.assertAllEqual(np_val.shape, c_sparse.dense_shape.eval())
c_dense = math_ops.multiply(c_sparse, 1.0)
self.assertAllClose(np_val, c_dense.eval())
def testWarnings(self):
# TODO(gunan) Reenable after this issue is fixed:
# https://github.com/google/protobuf/issues/2812
if sys.version_info >= (3, 6):
self.skipTest("Skipped test for Python 3.6+")
# Smaller than the threshold: no warning.
c_sparse = ops.IndexedSlices(
array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32), constant([4, 4, 4, 4]))
with warnings.catch_warnings(record=True) as w:
math_ops.multiply(c_sparse, 1.0)
self.assertEqual(0, len(w))
# Greater than or equal to the threshold: warning.
c_sparse = ops.IndexedSlices(
array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32), constant([100, 100, 100, 100]))
with warnings.catch_warnings(record=True) as w:
math_ops.multiply(c_sparse, 1.0)
self.assertEqual(1, len(w))
self.assertTrue(
"with 100000000 elements. This may consume a large amount of memory." in
str(w[0].message))
# Unknown dense shape: warning.
c_sparse = ops.IndexedSlices(
array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32),
array_ops.placeholder(dtypes.int32))
with warnings.catch_warnings(record=True) as w:
math_ops.multiply(c_sparse, 1.0)
self.assertEqual(1, len(w))
self.assertTrue(
"of unknown shape. This may consume a large amount of memory." in
str(w[0].message))
class OnlyRealGradientsTest(test_util.TensorFlowTestCase):
def testRealOnly(self):
x = constant_op.constant(7+3j, dtype=dtypes.complex64)
y = math_ops.square(x)
with self.assertRaisesRegexp(
TypeError,
r"Gradients of complex tensors must set grad_ys "
r"\(y\.dtype = tf\.complex64\)"):
gradients.gradients(y, x)
if __name__ == "__main__":
googletest.main()
|
ychfan/tensorflow
|
refs/heads/master
|
tensorflow/python/debug/wrappers/grpc_wrapper.py
|
65
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debugger wrapper session that sends debug data to file:// URLs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Google-internal import(s).
from tensorflow.python.debug.wrappers import framework
class GrpcDebugWrapperSession(framework.NonInteractiveDebugWrapperSession):
"""Debug Session wrapper that send debug data to gRPC stream(s)."""
_GRPC_URL_PREFIX = "grpc://"
def __init__(self,
sess,
grpc_debug_server_addresses,
watch_fn=None,
thread_name_filter=None,
log_usage=True):
"""Constructor of DumpingDebugWrapperSession.
Args:
sess: The TensorFlow `Session` object being wrapped.
grpc_debug_server_addresses: (`str` or `list` of `str`) Single or a list
of the gRPC debug server addresses, in the format of
<host:port>, without the "grpc://" prefix. For example:
"localhost:7000",
["localhost:7000", "192.168.0.2:8000"]
watch_fn: (`Callable`) A Callable that can be used to define per-run
debug ops and watched tensors. See the doc of
`NonInteractiveDebugWrapperSession.__init__()` for details.
thread_name_filter: Regular-expression white list for threads on which the
wrapper session will be active. See doc of `BaseDebugWrapperSession` for
more details.
log_usage: (`bool`) whether the usage of this class is to be logged.
Raises:
TypeError: If `grpc_debug_server_addresses` is not a `str` or a `list`
of `str`.
"""
if log_usage:
pass # No logging for open-source.
framework.NonInteractiveDebugWrapperSession.__init__(
self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter)
if isinstance(grpc_debug_server_addresses, str):
self._grpc_debug_server_urls = [
self._GRPC_URL_PREFIX + grpc_debug_server_addresses
]
elif isinstance(grpc_debug_server_addresses, list):
self._grpc_debug_server_urls = []
for address in grpc_debug_server_addresses:
if not isinstance(address, str):
raise TypeError(
"Expected type str in list grpc_debug_server_addresses, "
"received type %s" % type(address))
self._grpc_debug_server_urls.append(self._GRPC_URL_PREFIX + address)
else:
raise TypeError(
"Expected type str or list in grpc_debug_server_addresses, "
"received type %s" % type(grpc_debug_server_addresses))
def prepare_run_debug_urls(self, fetches, feed_dict):
"""Implementation of abstract method in superclass.
See doc of `NonInteractiveDebugWrapperSession.prepare_run_debug_urls()`
for details.
Args:
fetches: Same as the `fetches` argument to `Session.run()`
feed_dict: Same as the `feed_dict` argument to `Session.run()`
Returns:
      debug_urls: (`str` or `list` of `str`) grpc:// debug URLs to be used in
        this `Session.run()` call.
"""
return self._grpc_debug_server_urls
|
lejubila/piGardenWeb
|
refs/heads/master
|
vendor/almasaeed2010/adminlte/bower_components/bootstrap-datepicker/docs/conf.py
|
171
|
# -*- coding: utf-8 -*-
#
# bootstrap-datepicker documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 2 14:45:57 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
#version = ''
# The full version, including alpha/beta/rc tags.
#release = ''
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
primary_domain = 'js'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'bootstrap-datepicker'
copyright = u'2016, eternicode'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'javascript'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ['_themes',]
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'bootstrap-datepickerdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'bootstrap-datepicker.tex', u'bootstrap-datepicker Documentation',
u'eternicode', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'bootstrap-datepicker', u'bootstrap-datepicker Documentation',
[u'eternicode'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'bootstrap-datepicker', u'bootstrap-datepicker Documentation',
u'eternicode', 'bootstrap-datepicker', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
AdamWill/anaconda
|
refs/heads/master
|
tests/glade/check_mnemonics.py
|
9
|
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gladecheck import GladeTest
class CheckMnemonics(GladeTest):
def checkGlade(self, glade_tree):
"""Check for widgets with keyboard accelerators but no mnemonic"""
# Look for labels with use-underline=True and no mnemonic-widget
for label in glade_tree.xpath(".//object[@class='GtkLabel' and ./property[@name='use_underline' and ./text() = 'True'] and not(./property[@name='mnemonic_widget'])]"):
# And now filter out the cases where the label actually does have a mnemonic.
            # This list is probably not comprehensive.
parent = label.getparent()
# Is the label the child of a GtkButton? The button might be pretty far up there.
# Assume widget names that end in "Button" are subclasses of GtkButton
if parent.tag == 'child' and \
label.xpath("ancestor::object[substring(@class, string-length(@class) - string-length('Button') + 1) = 'Button']"):
continue
# Is the label a GtkNotebook tab?
if parent.tag == 'child' and parent.get('type') == 'tab' and \
parent.getparent().get('class') == 'GtkNotebook':
continue
raise AssertionError("Label with accelerator and no mnemonic at %s:%d" % (label.base, label.sourceline))
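# Illustrative sketch, not part of the original check: the XPath filter above
# applied to a minimal glade-like tree (assumes lxml is available).
#
#     from lxml import etree
#     tree = etree.fromstring(
#         "<interface><child><object class='GtkLabel'>"
#         "<property name='use_underline'>True</property>"
#         "</object></child></interface>")
#     hits = tree.xpath(".//object[@class='GtkLabel' and "
#                       "./property[@name='use_underline' and ./text() = 'True'] and "
#                       "not(./property[@name='mnemonic_widget'])]")
#     assert len(hits) == 1  # a label with an accelerator but no mnemonic target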
|
rosmo/ansible
|
refs/heads/devel
|
test/units/modules/net_tools/nios/test_nios_network.py
|
27
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.net_tools.nios import api
from ansible.modules.net_tools.nios import nios_network
from units.compat.mock import patch, MagicMock, Mock
from units.modules.utils import set_module_args
from .test_nios_module import TestNiosModule, load_fixture
class TestNiosNetworkModule(TestNiosModule):
module = nios_network
def setUp(self):
super(TestNiosNetworkModule, self).setUp()
self.module = MagicMock(name='ansible.modules.net_tools.nios.nios_network.WapiModule')
self.module.check_mode = False
self.module.params = {'provider': None}
self.mock_wapi = patch('ansible.modules.net_tools.nios.nios_network.WapiModule')
self.exec_command = self.mock_wapi.start()
        self.mock_wapi_run = patch('ansible.modules.net_tools.nios.nios_network.WapiModule.run')
        # start the patcher exactly once and keep the mock it returns;
        # starting it twice would leave a patch active after tearDown
        self.load_config = self.mock_wapi_run.start()
def tearDown(self):
super(TestNiosNetworkModule, self).tearDown()
self.mock_wapi.stop()
self.mock_wapi_run.stop()
def load_fixtures(self, commands=None):
self.exec_command.return_value = (0, load_fixture('nios_result.txt').strip(), None)
self.load_config.return_value = dict(diff=None, session='session')
def _get_wapi(self, test_object):
wapi = api.WapiModule(self.module)
wapi.get_object = Mock(name='get_object', return_value=test_object)
wapi.create_object = Mock(name='create_object')
wapi.update_object = Mock(name='update_object')
wapi.delete_object = Mock(name='delete_object')
return wapi
def test_nios_network_ipv4_create(self):
self.module.params = {'provider': None, 'state': 'present', 'network': '192.168.10.0/24',
'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
print("WAPI: ", wapi)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'network': '192.168.10.0/24'})
def test_nios_network_ipv4_dhcp_update(self):
self.module.params = {'provider': None, 'state': 'present', 'network': '192.168.10.0/24',
'comment': 'updated comment', 'extattrs': None}
test_object = [
{
"comment": "test comment",
"_ref": "network/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
"network": "192.168.10.0/24",
"extattrs": {'options': {'name': 'test', 'value': 'ansible.com'}}
}
]
test_spec = {
"network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
def test_nios_network_ipv6_dhcp_update(self):
self.module.params = {'provider': None, 'state': 'present', 'ipv6network': 'fe80::/64',
'comment': 'updated comment', 'extattrs': None}
test_object = [
{
"comment": "test comment",
"_ref": "ipv6network/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
"ipv6network": "fe80::/64",
"extattrs": {'options': {'name': 'test', 'value': 'ansible.com'}}
}
]
test_spec = {
"ipv6network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
def test_nios_network_ipv4_remove(self):
self.module.params = {'provider': None, 'state': 'absent', 'network': '192.168.10.0/24',
'comment': None, 'extattrs': None}
ref = "network/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
test_object = [{
"comment": "test comment",
"_ref": ref,
"network": "192.168.10.0/24",
"extattrs": {'Site': {'value': 'test'}}
}]
test_spec = {
"network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.delete_object.assert_called_once_with(ref)
def test_nios_network_ipv6_create(self):
self.module.params = {'provider': None, 'state': 'present', 'ipv6network': 'fe80::/64',
'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"ipv6network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'ipv6network': 'fe80::/64'})
def test_nios_network_ipv6_remove(self):
self.module.params = {'provider': None, 'state': 'absent', 'ipv6network': 'fe80::/64',
'comment': None, 'extattrs': None}
ref = "ipv6network/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
test_object = [{
"comment": "test comment",
"_ref": ref,
"ipv6network": "fe80::/64",
"extattrs": {'Site': {'value': 'test'}}
}]
test_spec = {
"ipv6network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.delete_object.assert_called_once_with(ref)
def test_nios_networkcontainer_ipv4_create(self):
self.module.params = {'provider': None, 'state': 'present', 'networkcontainer': '192.168.10.0/24',
'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"networkcontainer": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'networkcontainer': '192.168.10.0/24'})
def test_nios_networkcontainer_ipv4_remove(self):
self.module.params = {'provider': None, 'state': 'absent', 'networkcontainer': '192.168.10.0/24',
'comment': None, 'extattrs': None}
ref = "networkcontainer/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
test_object = [{
"comment": "test comment",
"_ref": ref,
"networkcontainer": "192.168.10.0/24"
}]
test_spec = {
"networkcontainer": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.delete_object.assert_called_once_with(ref)
def test_nios_networkcontainer_ipv6_create(self):
self.module.params = {'provider': None, 'state': 'present', 'ipv6networkcontainer': 'fe80::/64',
'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"ipv6networkcontainer": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'ipv6networkcontainer': 'fe80::/64'})
|
wemanuel/smry
|
refs/heads/master
|
smry/server-auth/ls/google-cloud-sdk/platform/gsutil/third_party/boto/boto/ec2/spotdatafeedsubscription.py
|
152
|
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an EC2 Spot Instance Datafeed Subscription
"""
from boto.ec2.ec2object import EC2Object
from boto.ec2.spotinstancerequest import SpotInstanceStateFault
class SpotDatafeedSubscription(EC2Object):
def __init__(self, connection=None, owner_id=None,
bucket=None, prefix=None, state=None, fault=None):
super(SpotDatafeedSubscription, self).__init__(connection)
self.owner_id = owner_id
self.bucket = bucket
self.prefix = prefix
self.state = state
self.fault = fault
def __repr__(self):
return 'SpotDatafeedSubscription:%s' % self.bucket
def startElement(self, name, attrs, connection):
if name == 'fault':
self.fault = SpotInstanceStateFault()
return self.fault
else:
return None
def endElement(self, name, value, connection):
if name == 'ownerId':
self.owner_id = value
elif name == 'bucket':
self.bucket = value
elif name == 'prefix':
self.prefix = value
elif name == 'state':
self.state = value
else:
setattr(self, name, value)
def delete(self, dry_run=False):
return self.connection.delete_spot_datafeed_subscription(
dry_run=dry_run
)
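# Illustrative sketch, not part of the original module: the SAX-style
# startElement/endElement hooks above are driven by boto's response parser,
# so a DescribeSpotDatafeedSubscription response containing
#   <ownerId>123456789012</ownerId><bucket>my-bucket</bucket>
#   <prefix>spot/</prefix><state>Active</state>
# populates owner_id, bucket, prefix and state on this object.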
|
yasinn/shadowsocks
|
refs/heads/master
|
shadowsocks/encrypt.py
|
990
|
#!/usr/bin/env python
#
# Copyright 2012-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import hashlib
import logging
from shadowsocks import common
from shadowsocks.crypto import rc4_md5, openssl, sodium, table
method_supported = {}
method_supported.update(rc4_md5.ciphers)
method_supported.update(openssl.ciphers)
method_supported.update(sodium.ciphers)
method_supported.update(table.ciphers)
def random_string(length):
return os.urandom(length)
cached_keys = {}
def try_cipher(key, method=None):
    # construct an Encryptor purely for validation; it raises (or exits)
    # if the method is unsupported or the key is unusable
    Encryptor(key, method)
def EVP_BytesToKey(password, key_len, iv_len):
    # equivalent to OpenSSL's EVP_BytesToKey() with count=1, so that we
    # derive the same key and IV as the Node.js implementation
cached_key = '%s-%d-%d' % (password, key_len, iv_len)
r = cached_keys.get(cached_key, None)
if r:
return r
m = []
i = 0
while len(b''.join(m)) < (key_len + iv_len):
md5 = hashlib.md5()
data = password
if i > 0:
data = m[i - 1] + password
md5.update(data)
m.append(md5.digest())
i += 1
ms = b''.join(m)
key = ms[:key_len]
iv = ms[key_len:key_len + iv_len]
cached_keys[cached_key] = (key, iv)
return key, iv
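# Worked example, not part of the original module: with count=1 each MD5
# block is 16 bytes, so
#     key, iv = EVP_BytesToKey(b'foobar', 16, 16)
# gives key == md5(b'foobar').digest() and
# iv == md5(md5(b'foobar').digest() + b'foobar').digest().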
class Encryptor(object):
def __init__(self, key, method):
self.key = key
self.method = method
self.iv = None
self.iv_sent = False
self.cipher_iv = b''
self.decipher = None
method = method.lower()
self._method_info = self.get_method_info(method)
if self._method_info:
self.cipher = self.get_cipher(key, method, 1,
random_string(self._method_info[1]))
else:
logging.error('method %s not supported' % method)
sys.exit(1)
def get_method_info(self, method):
method = method.lower()
m = method_supported.get(method)
return m
def iv_len(self):
return len(self.cipher_iv)
def get_cipher(self, password, method, op, iv):
password = common.to_bytes(password)
m = self._method_info
if m[0] > 0:
key, iv_ = EVP_BytesToKey(password, m[0], m[1])
else:
# key_length == 0 indicates we should use the key directly
key, iv = password, b''
iv = iv[:m[1]]
if op == 1:
            # this IV is for the cipher (encryption) side, not the decipher
self.cipher_iv = iv[:m[1]]
return m[2](method, key, iv, op)
def encrypt(self, buf):
if len(buf) == 0:
return buf
if self.iv_sent:
return self.cipher.update(buf)
else:
self.iv_sent = True
return self.cipher_iv + self.cipher.update(buf)
def decrypt(self, buf):
if len(buf) == 0:
return buf
if self.decipher is None:
decipher_iv_len = self._method_info[1]
decipher_iv = buf[:decipher_iv_len]
self.decipher = self.get_cipher(self.key, self.method, 0,
iv=decipher_iv)
buf = buf[decipher_iv_len:]
if len(buf) == 0:
return buf
return self.decipher.update(buf)
def encrypt_all(password, method, op, data):
result = []
method = method.lower()
(key_len, iv_len, m) = method_supported[method]
if key_len > 0:
key, _ = EVP_BytesToKey(password, key_len, iv_len)
else:
key = password
if op:
iv = random_string(iv_len)
result.append(iv)
else:
iv = data[:iv_len]
data = data[iv_len:]
cipher = m(method, key, iv, op)
result.append(cipher.update(data))
return b''.join(result)
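# Usage sketch, not part of the original module: encrypt_all() is the
# one-shot helper (shadowsocks uses it for UDP, where every datagram carries
# its own IV), as opposed to the stream-oriented Encryptor above.
#     ct = encrypt_all(b'key', 'aes-256-cfb', 1, b'payload')
#     pt = encrypt_all(b'key', 'aes-256-cfb', 0, ct)
#     assert pt == b'payload'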
CIPHERS_TO_TEST = [
'aes-128-cfb',
'aes-256-cfb',
'rc4-md5',
'salsa20',
'chacha20',
'table',
]
def test_encryptor():
from os import urandom
plain = urandom(10240)
for method in CIPHERS_TO_TEST:
        logging.warning(method)
encryptor = Encryptor(b'key', method)
decryptor = Encryptor(b'key', method)
cipher = encryptor.encrypt(plain)
plain2 = decryptor.decrypt(cipher)
assert plain == plain2
def test_encrypt_all():
from os import urandom
plain = urandom(10240)
for method in CIPHERS_TO_TEST:
        logging.warning(method)
cipher = encrypt_all(b'key', method, 1, plain)
plain2 = encrypt_all(b'key', method, 0, cipher)
assert plain == plain2
if __name__ == '__main__':
test_encrypt_all()
test_encryptor()
|
TshepangRas/tshilo-dikotla
|
refs/heads/develop
|
td_maternal/admin/maternal_diagnoses_admin.py
|
2
|
from django.contrib import admin
from ..forms import MaternalDiagnosesForm
from ..models import MaternalDiagnoses
from .base_maternal_model_admin import BaseMaternalModelAdmin
class MaternalDiagnosesAdmin(BaseMaternalModelAdmin):
form = MaternalDiagnosesForm
list_display = ('maternal_visit', 'new_diagnoses', 'has_who_dx')
list_filter = ('new_diagnoses', 'has_who_dx')
radio_fields = {'new_diagnoses': admin.VERTICAL,
'has_who_dx': admin.VERTICAL}
filter_horizontal = ('who', 'diagnoses')
admin.site.register(MaternalDiagnoses, MaternalDiagnosesAdmin)
|
juliakreutzer/bandit-neuralmonkey
|
refs/heads/master
|
neuralmonkey/trainers/__init__.py
|
1
|
from .cross_entropy_trainer import CrossEntropyTrainer
|
2mf/moto
|
refs/heads/master
|
moto/ec2/urls.py
|
18
|
from __future__ import unicode_literals
from .responses import EC2Response
url_bases = [
"https?://ec2.(.+).amazonaws.com",
]
url_paths = {
'{0}/': EC2Response.dispatch,
}
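# Quick check, not part of the original module: url_bases entries are regular
# expressions, so any region-qualified endpoint matches (the unescaped dots
# happen to match the literal dots as well):
#     import re
#     assert re.match(url_bases[0], 'https://ec2.us-east-1.amazonaws.com')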
|
ProgVal/cjdns
|
refs/heads/master
|
node_build/dependencies/libuv/build/gyp/test/win/gyptest-macro-vcinstalldir.py
|
344
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure macro expansion of $(VCInstallDir) is handled, and specifically
always / terminated for compatibility.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'vs-macros'
test.run_gyp('vcinstalldir.gyp', chdir=CHDIR)
# This fails on VS because the trailing slash escapes the trailing quote.
test.build('vcinstalldir.gyp', 'test_slash_trailing', chdir=CHDIR, status=1)
test.build('vcinstalldir.gyp', 'test_slash_dir', chdir=CHDIR)
test.pass_test()
|
puracore/pura
|
refs/heads/master
|
qa/rpc-tests/getchaintips.py
|
66
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the getchaintips API. We introduce a network split, work
# on chains of different lengths, and join the network together again.
# This gives us two tips; verify that the API reports both correctly.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class GetChainTipsTest (BitcoinTestFramework):
def run_test (self):
BitcoinTestFramework.run_test (self)
tips = self.nodes[0].getchaintips ()
assert_equal (len (tips), 1)
assert_equal (tips[0]['branchlen'], 0)
assert_equal (tips[0]['height'], 200)
assert_equal (tips[0]['status'], 'active')
# Split the network and build two chains of different lengths.
self.split_network ()
self.nodes[0].generate(10)
self.nodes[2].generate(20)
self.sync_all ()
tips = self.nodes[1].getchaintips ()
assert_equal (len (tips), 1)
shortTip = tips[0]
assert_equal (shortTip['branchlen'], 0)
assert_equal (shortTip['height'], 210)
assert_equal (tips[0]['status'], 'active')
tips = self.nodes[3].getchaintips ()
assert_equal (len (tips), 1)
longTip = tips[0]
assert_equal (longTip['branchlen'], 0)
assert_equal (longTip['height'], 220)
assert_equal (tips[0]['status'], 'active')
# Join the network halves and check that we now have two tips
# (at least at the nodes that previously had the short chain).
self.join_network ()
tips = self.nodes[0].getchaintips ()
assert_equal (len (tips), 2)
assert_equal (tips[0], longTip)
assert_equal (tips[1]['branchlen'], 10)
assert_equal (tips[1]['status'], 'valid-fork')
tips[1]['branchlen'] = 0
tips[1]['status'] = 'active'
assert_equal (tips[1], shortTip)
if __name__ == '__main__':
GetChainTipsTest ().main ()
|
chudaol/edx-platform
|
refs/heads/master
|
lms/djangoapps/dashboard/management/commands/tests/test_git_add_course.py
|
101
|
"""
Provide tests for git_add_course management command.
"""
import logging
import os
import shutil
import StringIO
import subprocess
import unittest
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test.utils import override_settings
from opaque_keys.edx.locations import SlashSeparatedCourseKey
import dashboard.git_import as git_import
from dashboard.git_import import GitImportError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
TEST_MONGODB_LOG = {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'user': '',
'password': '',
'db': 'test_xlog',
}
FEATURES_WITH_SSL_AUTH = settings.FEATURES.copy()
FEATURES_WITH_SSL_AUTH['AUTH_USE_CERTIFICATES'] = True
@override_settings(MONGODB_LOG=TEST_MONGODB_LOG)
@unittest.skipUnless(settings.FEATURES.get('ENABLE_SYSADMIN_DASHBOARD'),
"ENABLE_SYSADMIN_DASHBOARD not set")
class TestGitAddCourse(ModuleStoreTestCase):
"""
Tests the git_add_course management command for proper functions.
"""
TEST_REPO = 'https://github.com/mitocw/edx4edx_lite.git'
TEST_COURSE = 'MITx/edx4edx/edx4edx'
TEST_BRANCH = 'testing_do_not_delete'
TEST_BRANCH_COURSE = SlashSeparatedCourseKey('MITx', 'edx4edx_branch', 'edx4edx')
GIT_REPO_DIR = getattr(settings, 'GIT_REPO_DIR')
def assertCommandFailureRegexp(self, regex, *args):
"""
Convenience function for testing command failures
"""
with self.assertRaises(SystemExit):
with self.assertRaisesRegexp(CommandError, regex):
call_command('git_add_course', *args,
stderr=StringIO.StringIO())
def test_command_args(self):
"""
Validate argument checking
"""
self.assertCommandFailureRegexp(
'This script requires at least one argument, the git URL')
        self.assertCommandFailureRegexp(
            # 'recieved' (sic) mirrors the command's own misspelling
            'Expected no more than three arguments; recieved 4',
            'blah', 'blah', 'blah', 'blah')
self.assertCommandFailureRegexp(
'Repo was not added, check log output for details',
'blah')
# Test successful import from command
if not os.path.isdir(self.GIT_REPO_DIR):
os.mkdir(self.GIT_REPO_DIR)
self.addCleanup(shutil.rmtree, self.GIT_REPO_DIR)
# Make a course dir that will be replaced with a symlink
# while we are at it.
if not os.path.isdir(self.GIT_REPO_DIR / 'edx4edx'):
os.mkdir(self.GIT_REPO_DIR / 'edx4edx')
call_command('git_add_course', self.TEST_REPO,
self.GIT_REPO_DIR / 'edx4edx_lite')
# Test with all three args (branch)
call_command('git_add_course', self.TEST_REPO,
self.GIT_REPO_DIR / 'edx4edx_lite',
self.TEST_BRANCH)
def test_add_repo(self):
"""
Various exit path tests for test_add_repo
"""
with self.assertRaisesRegexp(GitImportError, GitImportError.NO_DIR):
git_import.add_repo(self.TEST_REPO, None, None)
os.mkdir(self.GIT_REPO_DIR)
self.addCleanup(shutil.rmtree, self.GIT_REPO_DIR)
with self.assertRaisesRegexp(GitImportError, GitImportError.URL_BAD):
git_import.add_repo('foo', None, None)
with self.assertRaisesRegexp(GitImportError, GitImportError.CANNOT_PULL):
git_import.add_repo('file:///foobar.git', None, None)
# Test git repo that exists, but is "broken"
bare_repo = os.path.abspath('{0}/{1}'.format(settings.TEST_ROOT, 'bare.git'))
os.mkdir(bare_repo)
self.addCleanup(shutil.rmtree, bare_repo)
subprocess.check_output(['git', '--bare', 'init', ], stderr=subprocess.STDOUT,
cwd=bare_repo)
with self.assertRaisesRegexp(GitImportError, GitImportError.BAD_REPO):
git_import.add_repo('file://{0}'.format(bare_repo), None, None)
def test_detached_repo(self):
"""
Test repo that is in detached head state.
"""
repo_dir = self.GIT_REPO_DIR
# Test successful import from command
try:
os.mkdir(repo_dir)
except OSError:
pass
self.addCleanup(shutil.rmtree, repo_dir)
git_import.add_repo(self.TEST_REPO, repo_dir / 'edx4edx_lite', None)
subprocess.check_output(['git', 'checkout', 'HEAD~2', ],
stderr=subprocess.STDOUT,
cwd=repo_dir / 'edx4edx_lite')
with self.assertRaisesRegexp(GitImportError, GitImportError.CANNOT_PULL):
git_import.add_repo(self.TEST_REPO, repo_dir / 'edx4edx_lite', None)
def test_branching(self):
"""
Exercise branching code of import
"""
repo_dir = self.GIT_REPO_DIR
# Test successful import from command
if not os.path.isdir(repo_dir):
os.mkdir(repo_dir)
self.addCleanup(shutil.rmtree, repo_dir)
# Checkout non existent branch
with self.assertRaisesRegexp(GitImportError, GitImportError.REMOTE_BRANCH_MISSING):
git_import.add_repo(self.TEST_REPO, repo_dir / 'edx4edx_lite', 'asdfasdfasdf')
# Checkout new branch
git_import.add_repo(self.TEST_REPO,
repo_dir / 'edx4edx_lite',
self.TEST_BRANCH)
def_ms = modulestore()
# Validate that it is different than master
self.assertIsNotNone(def_ms.get_course(self.TEST_BRANCH_COURSE))
# Attempt to check out the same branch again to validate branch choosing
# works
git_import.add_repo(self.TEST_REPO,
repo_dir / 'edx4edx_lite',
self.TEST_BRANCH)
# Delete to test branching back to master
def_ms.delete_course(self.TEST_BRANCH_COURSE, ModuleStoreEnum.UserID.test)
self.assertIsNone(def_ms.get_course(self.TEST_BRANCH_COURSE))
git_import.add_repo(self.TEST_REPO,
repo_dir / 'edx4edx_lite',
'master')
self.assertIsNone(def_ms.get_course(self.TEST_BRANCH_COURSE))
self.assertIsNotNone(def_ms.get_course(SlashSeparatedCourseKey.from_deprecated_string(self.TEST_COURSE)))
def test_branch_exceptions(self):
"""
        This will create conditions to exercise bad paths in the switch_branch function.
"""
# create bare repo that we can mess with and attempt an import
bare_repo = os.path.abspath('{0}/{1}'.format(settings.TEST_ROOT, 'bare.git'))
os.mkdir(bare_repo)
self.addCleanup(shutil.rmtree, bare_repo)
subprocess.check_output(['git', '--bare', 'init', ], stderr=subprocess.STDOUT,
cwd=bare_repo)
# Build repo dir
repo_dir = self.GIT_REPO_DIR
if not os.path.isdir(repo_dir):
os.mkdir(repo_dir)
self.addCleanup(shutil.rmtree, repo_dir)
rdir = '{0}/bare'.format(repo_dir)
with self.assertRaisesRegexp(GitImportError, GitImportError.BAD_REPO):
git_import.add_repo('file://{0}'.format(bare_repo), None, None)
# Get logger for checking strings in logs
output = StringIO.StringIO()
test_log_handler = logging.StreamHandler(output)
test_log_handler.setLevel(logging.DEBUG)
glog = git_import.log
glog.addHandler(test_log_handler)
# Move remote so fetch fails
shutil.move(bare_repo, '{0}/not_bare.git'.format(settings.TEST_ROOT))
try:
git_import.switch_branch('master', rdir)
except GitImportError:
self.assertIn('Unable to fetch remote', output.getvalue())
shutil.move('{0}/not_bare.git'.format(settings.TEST_ROOT), bare_repo)
output.truncate(0)
# Replace origin with a different remote
subprocess.check_output(
['git', 'remote', 'rename', 'origin', 'blah', ],
stderr=subprocess.STDOUT, cwd=rdir
)
with self.assertRaises(GitImportError):
git_import.switch_branch('master', rdir)
self.assertIn('Getting a list of remote branches failed', output.getvalue())
|
carnal0wnage/pupy
|
refs/heads/master
|
pupy/pupylib/PupyService.py
|
33
|
# -*- coding: UTF8 -*-
# --------------------------------------------------------------
# Copyright (c) 2015, Nicolas VERDIER (contact@n1nj4.eu)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
# --------------------------------------------------------------
import rpyc.core.service
import rpyc
import threading
import sys
import ssl
class PupyService(rpyc.Service):
def __init__(self, *args, **kwargs):
super(PupyService, self).__init__(*args, **kwargs)
        self.pupy_srv = glob_pupyServer  # module-level global injected at runtime by the server
def on_connect(self):
# code that runs when a connection is created
        # (to init the service, if needed)
self._conn._config.update(dict(
allow_safe_attrs = True,
allow_public_attrs = False,
allow_pickle = False,
allow_getattr = True,
allow_setattr = False,
allow_delattr = False,
import_custom_exceptions = False,
instantiate_custom_exceptions = False,
instantiate_oldstyle_exceptions = False,
))
#self._conn._config["safe_attrs"].add("__iter__")
#self._conn._config["safe_attrs"].add("readline")
        self.modules = None
        # some aliases:
        self.namespace = self._conn.root.namespace
        self.execute = self._conn.root.execute
        self.exit = self._conn.root.exit
        self.eval = self._conn.root.eval
self.exposed_stdin=sys.stdin
self.exposed_stdout=sys.stdout
self.exposed_stderr=sys.stderr
self.pupy_srv.add_client(self)
def on_disconnect(self):
self.pupy_srv.remove_client(self)
    def exposed_set_modules(self, modules):
        self.modules = modules
        # the builtin aliases can only be resolved once the client has sent
        # its modules namespace (self.modules is still None in on_connect)
        self.builtin = modules.__builtin__
        self.builtins = modules.__builtin__
|
edx/lettuce
|
refs/heads/master
|
lettuce/django/steps/models.py
|
6
|
"""
Step definitions for working with Django models.
"""
from datetime import datetime
import re
from django.core.management import call_command
from django.core.management.color import no_style
from django.db import connection
try:
from django.db.models.loading import get_models
except ImportError:
from django.apps import apps
get_models = apps.get_models
from django.utils.functional import curry
from functools import wraps
from lettuce import step
STEP_PREFIX = r'(?:Given|And|Then|When) '
def _models_generator():
"""
Build a hash of model verbose names to models
"""
for model in get_models():
yield (unicode(model._meta.verbose_name), model)
yield (unicode(model._meta.verbose_name_plural), model)
MODELS = dict(_models_generator())
_WRITE_MODEL = {}
def creates_models(model):
"""
Register a model-specific creation function. Wrapper around writes_models
that removes the field parameter (always a create operation).
"""
def decorated(func):
@wraps(func)
@writes_models(model)
def wrapped(data, field):
if field:
raise NotImplementedError(
"Must use the writes_models decorator to update models")
            return func(data)
        # return the registered wrapper so the decorated name is not None
        return wrapped
    return decorated
def writes_models(model):
"""
Register a model-specific create and update function.
"""
def decorated(func):
"""
Decorator for the creation function.
"""
_WRITE_MODEL[model] = func
return func
return decorated
_MODEL_EXISTS = {}
def checks_existence(model):
"""
Register a model-specific existence check function.
"""
def decorated(func):
"""
Decorator for the existence function.
"""
_MODEL_EXISTS[model] = func
return func
return decorated
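# Usage sketch for the registries above, assuming a hypothetical Badger model:
#
#     @checks_existence(Badger)
#     def check_badgers(step):
#         ...  # custom existence check
#
# registers check_badgers in _MODEL_EXISTS, so the generic existence step
# below dispatches to it instead of the default models_exist().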
def hash_data(hash_):
"""
Convert strings from a Lettuce hash to appropriate types
"""
res = {}
for key, value in hash_.items():
if type(value) in (str, unicode):
if value == "true":
value = True
elif value == "false":
value = False
elif value == "null":
value = None
elif value.isdigit() and not re.match("^0[0-9]+", value):
value = int(value)
elif re.match(r'^\d{4}-\d{2}-\d{2}$', value):
value = datetime.strptime(value, "%Y-%m-%d")
res[key] = value
return res
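# Worked example, not part of the original module:
#     hash_data({'age': '7', 'admin': 'true', 'joined': '2015-01-02',
#                'code': '007'})
# returns {'age': 7, 'admin': True, 'joined': datetime(2015, 1, 2),
# 'code': '007'} -- '007' stays a string because of the leading-zero guard.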
def hashes_data(step):
"""
Convert strings from step hashes to appropriate types
"""
return [hash_data(hash_) for hash_ in step.hashes]
def get_model(model):
"""
Convert a model's verbose name to the model class. This allows us to
use the models verbose name in steps.
"""
name = model.lower()
    model = MODELS.get(name, None)  # look up by the lowercased verbose name
assert model, "Could not locate model by name '%s'" % name
return model
def reset_sequence(model):
"""
Reset the ID sequence for a model.
"""
sql = connection.ops.sequence_reset_sql(no_style(), [model])
for cmd in sql:
connection.cursor().execute(cmd)
def create_models(model, data):
"""
Create models for each data hash. Wrapper around write_models.
"""
return write_models(model, data, None)
def write_models(model, data, field=None):
"""
Create or update models for each data hash. If field is present, it is the
field that is used to get the existing models out of the database to update
them; otherwise, new models are created.
"""
if hasattr(data, 'hashes'):
data = hashes_data(data)
written = []
for hash_ in data:
if field:
if field not in hash_:
raise KeyError(("The \"%s\" field is required for all update "
"operations") % field)
model_kwargs = {field: hash_[field]}
model_obj = model.objects.get(**model_kwargs)
for to_set, val in hash_.items():
setattr(model_obj, to_set, val)
model_obj.save()
else:
model_obj = model.objects.create(**hash_)
written.append(model_obj)
reset_sequence(model)
return written
def _dump_model(model, attrs=None):
"""
Dump the model fields for debugging.
"""
for field in model._meta.fields:
print '%s=%s,' % (field.name, str(getattr(model, field.name))),
if attrs is not None:
for attr in attrs:
print '%s=%s,' % (attr, str(getattr(model, attr))),
for field in model._meta.many_to_many:
vals = getattr(model, field.name)
print '%s=%s (%i),' % (
field.name,
', '.join(map(str, vals.all())),
vals.count()),
print
def models_exist(model, data, queryset=None):
"""
Check whether the models defined by @data exist in the @queryset.
"""
if hasattr(data, 'hashes'):
data = hashes_data(data)
if not queryset:
queryset = model.objects
failed = 0
try:
for hash_ in data:
fields = {}
extra_attrs = {}
for k, v in hash_.iteritems():
if k.startswith('@'):
# this is an attribute
extra_attrs[k[1:]] = v
else:
fields[k] = v
filtered = queryset.filter(**fields)
match = False
if filtered.exists():
for obj in filtered.all():
if all(getattr(obj, k) == v
for k, v in extra_attrs.iteritems()):
match = True
break
assert match, \
"%s does not exist: %s\n%s" % (
model.__name__, hash_, filtered.query)
except AssertionError as exc:
print exc
failed += 1
if failed:
print "Rows in DB are:"
for model in queryset.all():
_dump_model(model, extra_attrs.keys())
raise AssertionError("%i rows missing" % failed)
for txt in (
(r'I have(?: an?)? ([a-z][a-z0-9_ ]*) in the database:'),
(r'I update(?: an?)? existing ([a-z][a-z0-9_ ]*) by ([a-z][a-z0-9_]*) '
'in the database:'),
):
@step(txt)
def write_models_generic(step, model, field=None):
"""
And I have foos in the database:
| name | bar |
| Baz | Quux |
And I update existing foos by pk in the database:
| pk | name |
| 1 | Bar |
The generic method can be overridden for a specific model by defining a
function write_badgers(step, field), which creates and updates
the Badger model and decorating it with the writes_models(model_class)
decorator.
@writes_models(Profile)
def write_profile(step, field):
'''Creates a Profile model'''
for hash_ in hashes_data(step):
if field:
profile = Profile.objects.get(**{field: hash_[field]})
else:
profile = Profile()
...
"""
model = get_model(model)
try:
func = _WRITE_MODEL[model]
except KeyError:
func = curry(write_models, model)
func(step, field)
@step(STEP_PREFIX + r'([A-Z][a-z0-9_ ]*) with ([a-z]+) "([^"]*)"' +
r' has(?: an?)? ([A-Z][a-z0-9_ ]*) in the database:')
def create_models_for_relation(step, rel_model_name,
rel_key, rel_value, model):
"""
And project with name "Ball Project" has goals in the database:
| description |
| To have fun playing with balls of twine |
"""
lookup = {rel_key: rel_value}
rel_model = get_model(rel_model_name).objects.get(**lookup)
for hash_ in step.hashes:
hash_['%s' % rel_model_name] = rel_model
write_models_generic(step, model)
@step(STEP_PREFIX + r'([A-Z][a-z0-9_ ]*) with ([a-z]+) "([^"]*)"' +
r' is linked to ([A-Z][a-z0-9_ ]*) in the database:')
def create_m2m_links(step, rel_model_name, rel_key, rel_value, relation_name):
"""
And article with name "Guidelines" is linked to tags in the database:
| name |
| coding |
| style |
"""
lookup = {rel_key: rel_value}
rel_model = get_model(rel_model_name).objects.get(**lookup)
relation = None
for m2m in rel_model._meta.many_to_many:
if relation_name in (m2m.name, m2m.verbose_name):
relation = getattr(rel_model, m2m.name)
break
if not relation:
try:
relation = getattr(rel_model, relation_name)
except AttributeError:
pass
assert relation, \
"%s does not have a many-to-many relation named '%s'" % (
rel_model._meta.verbose_name.capitalize(),
relation_name,
)
m2m_model = relation.model
for hash_ in step.hashes:
relation.add(m2m_model.objects.get(**hash_))
@step(STEP_PREFIX + r'(?:an? )?([A-Z][a-z0-9_ ]*) should be present ' +
r'in the database')
def models_exist_generic(step, model):
"""
And objectives should be present in the database:
| description |
| Make a mess |
"""
model = get_model(model)
try:
func = _MODEL_EXISTS[model]
except KeyError:
func = curry(models_exist, model)
func(step)
@step(r'There should be (\d+) ([a-z][a-z0-9_ ]*) in the database')
def model_count(step, count, model):
"""
Then there should be 0 goals in the database
"""
model = get_model(model)
expected = int(count)
found = model.objects.count()
assert found == expected, "Expected %d %s, found %d." % \
(expected, model._meta.verbose_name_plural, found)
def clean_db(scenario):
"""
Clean the DB after each scenario
Usage: after.each_scenario(clean_db)
"""
call_command('flush', interactive=False)
|
CivicKnowledge/metaeditor
|
refs/heads/master
|
accounts/tests/test_models.py
|
1
|
# -*- coding: utf-8 -*-
import fudge
from django.contrib.auth.models import User
from django.test import TestCase
class SendSuccessRegistrationTest(TestCase):
@fudge.patch(
'accounts.models.mail_admins')
def test_sends_email_on_user_creation(self, fake_mail):
fake_mail.expects_call()
User.objects.create_user('user1')
@fudge.patch(
'accounts.models.mail_admins',
'accounts.models.logger.error')
def test_logs_error(self, fake_mail, fake_error):
fake_mail.expects_call()\
.raises(Exception('My fake exception'))
fake_error.expects_call()
User.objects.create_user('user1')
|
dash-dash/AutobahnPython
|
refs/heads/master
|
examples/twisted/websocket/streaming/frame_based_server.py
|
2
|
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Tavendo GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import hashlib
from twisted.internet import reactor
from autobahn.twisted.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS
class FrameBasedHashServerProtocol(WebSocketServerProtocol):
"""
    Frame-based WebSocket server that computes a running SHA-256 over the
    message data received. It responds after every frame with the digest
    computed so far, and it can receive messages consisting of an unlimited
    number of frames. The digest is reset at the start of each new message.
"""
def onMessageBegin(self, isBinary):
WebSocketServerProtocol.onMessageBegin(self, isBinary)
self.sha256 = hashlib.sha256()
    def onMessageFrame(self, payload):
        length = 0
        for data in payload:
            length += len(data)
            self.sha256.update(data)
        digest = self.sha256.hexdigest()
        print("Received frame with payload length {}, computed digest: {}".format(length, digest))
self.sendMessage(digest.encode('utf8'))
def onMessageEnd(self):
self.sha256 = None
if __name__ == '__main__':
factory = WebSocketServerFactory("ws://localhost:9000")
factory.protocol = FrameBasedHashServerProtocol
enableCompression = False
if enableCompression:
from autobahn.websocket.compress import PerMessageDeflateOffer, \
PerMessageDeflateOfferAccept
# Function to accept offers from the client ..
def accept(offers):
for offer in offers:
if isinstance(offer, PerMessageDeflateOffer):
return PerMessageDeflateOfferAccept(offer)
factory.setProtocolOptions(perMessageCompressionAccept=accept)
listenWS(factory)
reactor.run()
|
hammertoe/didactic-spork
|
refs/heads/master
|
lib/flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.py
|
629
|
from flask import Blueprint, render_template
frontend = Blueprint('frontend', __name__, template_folder='templates')
@frontend.route('/')
def index():
return render_template('frontend/index.html')
|
aESeguridad/GERE
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/pyparsing.py
|
49
|
# module pyparsing.py
#
# Copyright (c) 2003-2015 Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__doc__ = \
"""
pyparsing module - Classes and methods to define and execute parsing grammars
The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
provides a library of classes that you use to construct the grammar directly in Python.
Here is a program to parse "Hello, World!" (or any greeting of the form C{"<salutation>, <addressee>!"})::
from pyparsing import Word, alphas
# define grammar of a greeting
greet = Word( alphas ) + "," + Word( alphas ) + "!"
hello = "Hello, World!"
print (hello, "->", greet.parseString( hello ))
The program outputs the following::
Hello, World! -> ['Hello', ',', 'World', '!']
The Python representation of the grammar is quite readable, owing to the self-explanatory
class names, and the use of '+', '|' and '^' operators.
The parsed results returned from C{parseString()} can be accessed as a nested list, a dictionary, or an
object with named attributes.
The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
- extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
- quoted strings
- embedded comments
"""
__version__ = "2.1.0"
__versionTime__ = "7 Feb 2016 14:09"
__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
import string
from weakref import ref as wkref
import copy
import sys
import warnings
import re
import sre_constants
import collections
import pprint
import functools
import itertools
import traceback
#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
__all__ = [
'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
]
PY_3 = sys.version.startswith('3')
if PY_3:
_MAX_INT = sys.maxsize
basestring = str
unichr = chr
_ustr = str
# build list of single arg builtins, that can be used as parse actions
singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
else:
_MAX_INT = sys.maxint
range = xrange
def _ustr(obj):
"""Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
then < returns the unicode object | encodes it with the default encoding | ... >.
"""
if isinstance(obj,unicode):
return obj
try:
# If this works, then _ustr(obj) has the same behaviour as str(obj), so
# it won't break any existing code.
return str(obj)
except UnicodeEncodeError:
# Else encode it
ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
xmlcharref = Regex('&#\d+;')
xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
return xmlcharref.transformString(ret)
# build list of single arg builtins, tolerant of Python version, that can be used as parse actions
singleArgBuiltins = []
import __builtin__
for fname in "sum len sorted reversed list tuple set any all min max".split():
try:
singleArgBuiltins.append(getattr(__builtin__,fname))
except AttributeError:
continue
_generatorType = type((y for y in range(1)))
def _xml_escape(data):
"""Escape &, <, >, ", ', etc. in a string of data."""
# ampersand must be replaced first
from_symbols = '&><"\''
to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
for from_,to_ in zip(from_symbols, to_symbols):
data = data.replace(from_, to_)
return data
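# Worked example, not part of the original module:
#     _xml_escape('a<b & "c"')  ->  'a&lt;b &amp; &quot;c&quot;'
# Replacing '&' first matters: doing it last would corrupt the entities the
# other replacements introduce ('&lt;' would become '&amp;lt;').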
class _Constants(object):
pass
alphas = string.ascii_uppercase + string.ascii_lowercase
nums = "0123456789"
hexnums = nums + "ABCDEFabcdef"
alphanums = alphas + nums
_bslash = chr(92)
printables = "".join(c for c in string.printable if c not in string.whitespace)
class ParseBaseException(Exception):
"""base exception class for all parsing runtime exceptions"""
# Performance tuning: we construct a *lot* of these, so keep this
# constructor as small and fast as possible
def __init__( self, pstr, loc=0, msg=None, elem=None ):
self.loc = loc
if msg is None:
self.msg = pstr
self.pstr = ""
else:
self.msg = msg
self.pstr = pstr
self.parserElement = elem
def __getattr__( self, aname ):
"""supported attributes by name are:
- lineno - returns the line number of the exception text
- col - returns the column number of the exception text
- line - returns the line containing the exception text
"""
if( aname == "lineno" ):
return lineno( self.loc, self.pstr )
elif( aname in ("col", "column") ):
return col( self.loc, self.pstr )
elif( aname == "line" ):
return line( self.loc, self.pstr )
else:
raise AttributeError(aname)
def __str__( self ):
return "%s (at char %d), (line:%d, col:%d)" % \
( self.msg, self.loc, self.lineno, self.column )
def __repr__( self ):
return _ustr(self)
def markInputline( self, markerString = ">!<" ):
"""Extracts the exception line from the input string, and marks
the location of the exception with a special symbol.
"""
line_str = self.line
line_column = self.column - 1
if markerString:
line_str = "".join((line_str[:line_column],
markerString, line_str[line_column:]))
return line_str.strip()
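    # Worked example, not part of the original module: for the one-line input
    # "Hello World" with an exception at loc 6, markInputline() returns
    # 'Hello >!<World'.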
def __dir__(self):
return "lineno col line".split() + dir(type(self))
class ParseException(ParseBaseException):
"""exception thrown when parse expressions don't match class;
supported attributes by name are:
- lineno - returns the line number of the exception text
- col - returns the column number of the exception text
- line - returns the line containing the exception text
"""
pass
class ParseFatalException(ParseBaseException):
"""user-throwable exception thrown when inconsistent parse content
is found; stops all parsing immediately"""
pass
class ParseSyntaxException(ParseFatalException):
"""just like C{L{ParseFatalException}}, but thrown internally when an
C{L{ErrorStop<And._ErrorStop>}} ('-' operator) indicates that parsing is to stop immediately because
an unbacktrackable syntax error has been found"""
def __init__(self, pe):
super(ParseSyntaxException, self).__init__(
pe.pstr, pe.loc, pe.msg, pe.parserElement)
#~ class ReparseException(ParseBaseException):
#~ """Experimental class - parse actions can raise this exception to cause
#~ pyparsing to reparse the input string:
#~ - with a modified input string, and/or
#~ - with a modified start location
#~ Set the values of the ReparseException in the constructor, and raise the
#~ exception in a parse action to cause pyparsing to use the new string/location.
#~ Setting the values as None causes no change to be made.
#~ """
#~ def __init_( self, newstring, restartLoc ):
#~ self.newParseText = newstring
#~ self.reparseLoc = restartLoc
class RecursiveGrammarException(Exception):
"""exception thrown by C{validate()} if the grammar could be improperly recursive"""
def __init__( self, parseElementList ):
self.parseElementTrace = parseElementList
def __str__( self ):
return "RecursiveGrammarException: %s" % self.parseElementTrace
class _ParseResultsWithOffset(object):
def __init__(self,p1,p2):
self.tup = (p1,p2)
def __getitem__(self,i):
return self.tup[i]
def __repr__(self):
return repr(self.tup)
def setOffset(self,i):
self.tup = (self.tup[0],i)
class ParseResults(object):
"""Structured parse results, to provide multiple means of access to the parsed data:
- as a list (C{len(results)})
- by list index (C{results[0], results[1]}, etc.)
- by attribute (C{results.<resultsName>})
"""
def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
if isinstance(toklist, cls):
return toklist
retobj = object.__new__(cls)
retobj.__doinit = True
return retobj
# Performance tuning: we construct a *lot* of these, so keep this
# constructor as small and fast as possible
def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
if self.__doinit:
self.__doinit = False
self.__name = None
self.__parent = None
self.__accumNames = {}
self.__asList = asList
self.__modal = modal
if toklist is None:
toklist = []
if isinstance(toklist, list):
self.__toklist = toklist[:]
elif isinstance(toklist, _generatorType):
self.__toklist = list(toklist)
else:
self.__toklist = [toklist]
self.__tokdict = dict()
if name is not None and name:
if not modal:
self.__accumNames[name] = 0
if isinstance(name,int):
name = _ustr(name) # will always return a str, but use _ustr for consistency
self.__name = name
if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
if isinstance(toklist,basestring):
toklist = [ toklist ]
if asList:
if isinstance(toklist,ParseResults):
self[name] = _ParseResultsWithOffset(toklist.copy(),0)
else:
self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
self[name].__name = name
else:
try:
self[name] = toklist[0]
except (KeyError,TypeError,IndexError):
self[name] = toklist
def __getitem__( self, i ):
if isinstance( i, (int,slice) ):
return self.__toklist[i]
else:
if i not in self.__accumNames:
return self.__tokdict[i][-1][0]
else:
return ParseResults([ v[0] for v in self.__tokdict[i] ])
def __setitem__( self, k, v, isinstance=isinstance ):
if isinstance(v,_ParseResultsWithOffset):
self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
sub = v[0]
elif isinstance(k,int):
self.__toklist[k] = v
sub = v
else:
self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
sub = v
if isinstance(sub,ParseResults):
sub.__parent = wkref(self)
def __delitem__( self, i ):
if isinstance(i,(int,slice)):
mylen = len( self.__toklist )
del self.__toklist[i]
# convert int to slice
if isinstance(i, int):
if i < 0:
i += mylen
i = slice(i, i+1)
# get removed indices
removed = list(range(*i.indices(mylen)))
removed.reverse()
# fixup indices in token dictionary
#~ for name in self.__tokdict:
#~ occurrences = self.__tokdict[name]
#~ for j in removed:
#~ for k, (value, position) in enumerate(occurrences):
#~ occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
for name,occurrences in self.__tokdict.items():
for j in removed:
for k, (value, position) in enumerate(occurrences):
occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
else:
del self.__tokdict[i]
def __contains__( self, k ):
return k in self.__tokdict
def __len__( self ): return len( self.__toklist )
def __bool__(self): return ( not not self.__toklist )
__nonzero__ = __bool__
def __iter__( self ): return iter( self.__toklist )
def __reversed__( self ): return iter( self.__toklist[::-1] )
def iterkeys( self ):
"""Returns all named result keys."""
if hasattr(self.__tokdict, "iterkeys"):
return self.__tokdict.iterkeys()
else:
return iter(self.__tokdict)
def itervalues( self ):
"""Returns all named result values."""
return (self[k] for k in self.iterkeys())
def iteritems( self ):
return ((k, self[k]) for k in self.iterkeys())
if PY_3:
keys = iterkeys
values = itervalues
items = iteritems
else:
def keys( self ):
"""Returns all named result keys."""
return list(self.iterkeys())
def values( self ):
"""Returns all named result values."""
return list(self.itervalues())
def items( self ):
"""Returns all named result keys and values as a list of tuples."""
return list(self.iteritems())
def haskeys( self ):
"""Since keys() returns an iterator, this method is helpful in bypassing
code that looks for the existence of any defined results names."""
return bool(self.__tokdict)
def pop( self, *args, **kwargs):
"""Removes and returns item at specified index (default=last).
Supports both list and dict semantics for pop(). If passed no
argument or an integer argument, it will use list semantics
and pop tokens from the list of parsed tokens. If passed a
non-integer argument (most likely a string), it will use dict
semantics and pop the corresponding value from any defined
results names. A second default return value argument is
supported, just as in dict.pop()."""
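        # Illustrative sketch (hypothetical values; assumes Word/nums from this module):
        #   r = Word(nums)("num").parseString("123")
        #   r.pop('missing', 'n/a')   # -> 'n/a' (dict semantics, default returned)
        #   r.pop()                   # -> '123' (list semantics, pops last token)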
if not args:
args = [-1]
for k,v in kwargs.items():
if k == 'default':
args = (args[0], v)
else:
raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
if (isinstance(args[0], int) or
len(args) == 1 or
args[0] in self):
index = args[0]
ret = self[index]
del self[index]
return ret
else:
defaultvalue = args[1]
return defaultvalue
def get(self, key, defaultValue=None):
"""Returns named result matching the given key, or if there is no
such name, then returns the given C{defaultValue} or C{None} if no
C{defaultValue} is specified."""
if key in self:
return self[key]
else:
return defaultValue
def insert( self, index, insStr ):
"""Inserts new element at location index in the list of parsed tokens."""
self.__toklist.insert(index, insStr)
# fixup indices in token dictionary
#~ for name in self.__tokdict:
#~ occurrences = self.__tokdict[name]
#~ for k, (value, position) in enumerate(occurrences):
#~ occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
for name,occurrences in self.__tokdict.items():
for k, (value, position) in enumerate(occurrences):
occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
def append( self, item ):
"""Add single element to end of ParseResults list of elements."""
self.__toklist.append(item)
def extend( self, itemseq ):
"""Add sequence of elements to end of ParseResults list of elements."""
if isinstance(itemseq, ParseResults):
self += itemseq
else:
self.__toklist.extend(itemseq)
def clear( self ):
"""Clear all elements and results names."""
del self.__toklist[:]
self.__tokdict.clear()
def __getattr__( self, name ):
try:
return self[name]
except KeyError:
return ""
def __add__( self, other ):
ret = self.copy()
ret += other
return ret
def __iadd__( self, other ):
if other.__tokdict:
offset = len(self.__toklist)
addoffset = lambda a: offset if a<0 else a+offset
otheritems = other.__tokdict.items()
otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
for (k,vlist) in otheritems for v in vlist]
for k,v in otherdictitems:
self[k] = v
if isinstance(v[0],ParseResults):
v[0].__parent = wkref(self)
self.__toklist += other.__toklist
self.__accumNames.update( other.__accumNames )
return self
def __radd__(self, other):
if isinstance(other,int) and other == 0:
# useful for merging many ParseResults using sum() builtin
return self.copy()
else:
# this may raise a TypeError - so be it
return other + self
def __repr__( self ):
return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
def __str__( self ):
return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
def _asStringList( self, sep='' ):
out = []
for item in self.__toklist:
if out and sep:
out.append(sep)
if isinstance( item, ParseResults ):
out += item._asStringList()
else:
out.append( _ustr(item) )
return out
def asList( self ):
"""Returns the parse results as a nested list of matching tokens, all converted to strings."""
return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
def asDict( self ):
"""Returns the named parse results as a nested dictionary."""
if PY_3:
item_fn = self.items
else:
item_fn = self.iteritems
return dict((k,v.asDict()) if isinstance(v, ParseResults) else (k,v) for k,v in item_fn())
def copy( self ):
"""Returns a new copy of a C{ParseResults} object."""
ret = ParseResults( self.__toklist )
ret.__tokdict = self.__tokdict.copy()
ret.__parent = self.__parent
ret.__accumNames.update( self.__accumNames )
ret.__name = self.__name
return ret
def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
"""Returns the parse results as XML. Tags are created for tokens and lists that have defined results names."""
nl = "\n"
out = []
namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
for v in vlist)
nextLevelIndent = indent + " "
# collapse out indents if formatting is not desired
if not formatted:
indent = ""
nextLevelIndent = ""
nl = ""
selfTag = None
if doctag is not None:
selfTag = doctag
else:
if self.__name:
selfTag = self.__name
if not selfTag:
if namedItemsOnly:
return ""
else:
selfTag = "ITEM"
out += [ nl, indent, "<", selfTag, ">" ]
for i,res in enumerate(self.__toklist):
if isinstance(res,ParseResults):
if i in namedItems:
out += [ res.asXML(namedItems[i],
namedItemsOnly and doctag is None,
nextLevelIndent,
formatted)]
else:
out += [ res.asXML(None,
namedItemsOnly and doctag is None,
nextLevelIndent,
formatted)]
else:
# individual token, see if there is a name for it
resTag = None
if i in namedItems:
resTag = namedItems[i]
if not resTag:
if namedItemsOnly:
continue
else:
resTag = "ITEM"
xmlBodyText = _xml_escape(_ustr(res))
out += [ nl, nextLevelIndent, "<", resTag, ">",
xmlBodyText,
"</", resTag, ">" ]
out += [ nl, indent, "</", selfTag, ">" ]
return "".join(out)
def __lookup(self,sub):
for k,vlist in self.__tokdict.items():
for v,loc in vlist:
if sub is v:
return k
return None
def getName(self):
"""Returns the results name for this token expression."""
if self.__name:
return self.__name
elif self.__parent:
par = self.__parent()
if par:
return par.__lookup(self)
else:
return None
elif (len(self) == 1 and
len(self.__tokdict) == 1 and
               # use next(iter(...)) so this works on Python 3, where dict views are not indexable
               next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
            return next(iter(self.__tokdict.keys()))
else:
return None
def dump(self,indent='',depth=0):
"""Diagnostic method for listing out the contents of a C{ParseResults}.
Accepts an optional C{indent} argument so that this string can be embedded
in a nested display of other data."""
out = []
NL = '\n'
out.append( indent+_ustr(self.asList()) )
if self.haskeys():
items = sorted(self.items())
for k,v in items:
if out:
out.append(NL)
out.append( "%s%s- %s: " % (indent,(' '*depth), k) )
if isinstance(v,ParseResults):
if v:
out.append( v.dump(indent,depth+1) )
else:
out.append(_ustr(v))
else:
out.append(_ustr(v))
elif any(isinstance(vv,ParseResults) for vv in self):
v = self
for i,vv in enumerate(v):
if isinstance(vv,ParseResults):
out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) ))
else:
out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv)))
return "".join(out)
def pprint(self, *args, **kwargs):
"""Pretty-printer for parsed results as a list, using the C{pprint} module.
Accepts additional positional or keyword args as defined for the
C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})"""
pprint.pprint(self.asList(), *args, **kwargs)
# add support for pickle protocol
def __getstate__(self):
return ( self.__toklist,
( self.__tokdict.copy(),
self.__parent is not None and self.__parent() or None,
self.__accumNames,
self.__name ) )
def __setstate__(self,state):
self.__toklist = state[0]
(self.__tokdict,
par,
inAccumNames,
self.__name) = state[1]
self.__accumNames = {}
self.__accumNames.update(inAccumNames)
if par is not None:
self.__parent = wkref(par)
else:
self.__parent = None
def __getnewargs__(self):
return self.__toklist, self.__name, self.__asList, self.__modal
def __dir__(self):
return (dir(type(self)) + list(self.keys()))
try:
    collections.abc.MutableMapping.register(ParseResults)
except AttributeError:
    # Python 2.x has no collections.abc; fall back to the old location
    collections.MutableMapping.register(ParseResults)
def col (loc,strg):
"""Returns current column within a string, counting newlines as line separators.
The first column is number 1.
Note: the default parsing behavior is to expand tabs in the input string
before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
consistent view of the parsed string, the parse location, and line and column
positions within the parsed string.
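    Example (illustrative)::
        col(4, "ab\\ncd")   # -> 2 ('d' is the 2nd column of line 2)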
"""
s = strg
return 1 if loc<len(s) and s[loc] == '\n' else loc - s.rfind("\n", 0, loc)
def lineno(loc,strg):
"""Returns current line number within a string, counting newlines as line separators.
The first line is number 1.
Note: the default parsing behavior is to expand tabs in the input string
before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
consistent view of the parsed string, the parse location, and line and column
positions within the parsed string.
"""
return strg.count("\n",0,loc) + 1
def line( loc, strg ):
"""Returns the line of text containing loc within a string, counting newlines as line separators.
"""
lastCR = strg.rfind("\n", 0, loc)
nextCR = strg.find("\n", loc)
if nextCR >= 0:
return strg[lastCR+1:nextCR]
else:
return strg[lastCR+1:]
def _defaultStartDebugAction( instring, loc, expr ):
print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
def _defaultExceptionDebugAction( instring, loc, expr, exc ):
print ("Exception raised:" + _ustr(exc))
def nullDebugAction(*args):
"""'Do-nothing' debug action, to suppress debugging output during parsing."""
pass
# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
#~ 'decorator to trim function calls to match the arity of the target'
#~ def _trim_arity(func, maxargs=3):
#~ if func in singleArgBuiltins:
#~ return lambda s,l,t: func(t)
#~ limit = 0
#~ foundArity = False
#~ def wrapper(*args):
#~ nonlocal limit,foundArity
#~ while 1:
#~ try:
#~ ret = func(*args[limit:])
#~ foundArity = True
#~ return ret
#~ except TypeError:
#~ if limit == maxargs or foundArity:
#~ raise
#~ limit += 1
#~ continue
#~ return wrapper
# this version is Python 2.x-3.x cross-compatible
def _trim_arity(func, maxargs=2):
    "decorator to trim function calls to match the arity of the target"
if func in singleArgBuiltins:
return lambda s,l,t: func(t)
limit = [0]
foundArity = [False]
def wrapper(*args):
while 1:
try:
ret = func(*args[limit[0]:]) #~@$^*)+_(&%#!=-`~;:"[]{}
foundArity[0] = True
return ret
except TypeError:
# re-raise TypeErrors if they did not come from our arity testing
if foundArity[0]:
raise
else:
try:
tb = sys.exc_info()[-1]
exc_source_line = traceback.extract_tb(tb)[-1][-1]
if not exc_source_line.endswith('#~@$^*)+_(&%#!=-`~;:"[]{}'):
raise
finally:
del tb
if limit[0] <= maxargs:
limit[0] += 1
continue
raise
return wrapper
class ParserElement(object):
"""Abstract base level parser element class."""
DEFAULT_WHITE_CHARS = " \n\t\r"
verbose_stacktrace = False
@staticmethod
def setDefaultWhitespaceChars( chars ):
"""Overrides the default whitespace chars
"""
ParserElement.DEFAULT_WHITE_CHARS = chars
@staticmethod
def inlineLiteralsUsing(cls):
"""
Set class to be used for inclusion of string literals into a parser.
"""
ParserElement.literalStringClass = cls
def __init__( self, savelist=False ):
self.parseAction = list()
self.failAction = None
#~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall
self.strRepr = None
self.resultsName = None
self.saveAsList = savelist
self.skipWhitespace = True
self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
self.copyDefaultWhiteChars = True
self.mayReturnEmpty = False # used when checking for left-recursion
self.keepTabs = False
self.ignoreExprs = list()
self.debug = False
self.streamlined = False
self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
self.errmsg = ""
self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
self.debugActions = ( None, None, None ) #custom debug actions
self.re = None
self.callPreparse = True # used to avoid redundant calls to preParse
self.callDuringTry = False
def copy( self ):
"""Make a copy of this C{ParserElement}. Useful for defining different parse actions
for the same parsing pattern, using copies of the original parse element."""
cpy = copy.copy( self )
cpy.parseAction = self.parseAction[:]
cpy.ignoreExprs = self.ignoreExprs[:]
if self.copyDefaultWhiteChars:
cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
return cpy
def setName( self, name ):
"""Define name for this expression, for use in debugging."""
self.name = name
self.errmsg = "Expected " + self.name
if hasattr(self,"exception"):
self.exception.msg = self.errmsg
return self
def setResultsName( self, name, listAllMatches=False ):
"""Define name for referencing matching tokens as a nested attribute
of the returned parse results.
NOTE: this returns a *copy* of the original C{ParserElement} object;
this is so that the client can define a basic element, such as an
integer, and reference it in multiple places with different names.
You can also set results names using the abbreviated syntax,
C{expr("name")} in place of C{expr.setResultsName("name")} -
see L{I{__call__}<__call__>}.
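        Example (illustrative sketch; assumes C{Word} and C{nums} from this module)::
            integer = Word(nums)
            date = integer.setResultsName("year") + '/' + integer.setResultsName("month")
            date.parseString("1999/12").year   # -> '1999'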
"""
newself = self.copy()
if name.endswith("*"):
name = name[:-1]
listAllMatches=True
newself.resultsName = name
newself.modalResults = not listAllMatches
return newself
def setBreak(self,breakFlag = True):
"""Method to invoke the Python pdb debugger when this element is
about to be parsed. Set C{breakFlag} to True to enable, False to
disable.
"""
if breakFlag:
_parseMethod = self._parse
def breaker(instring, loc, doActions=True, callPreParse=True):
import pdb
pdb.set_trace()
return _parseMethod( instring, loc, doActions, callPreParse )
breaker._originalParseMethod = _parseMethod
self._parse = breaker
else:
if hasattr(self._parse,"_originalParseMethod"):
self._parse = self._parse._originalParseMethod
return self
def setParseAction( self, *fns, **kwargs ):
"""Define action to perform when successfully matching parse element definition.
Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
- s = the original string being parsed (see note below)
- loc = the location of the matching substring
- toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
If the functions in fns modify the tokens, they can return them as the return
value from fn, and the modified list of tokens will replace the original.
Otherwise, fn does not need to return any value.
Note: the default parsing behavior is to expand tabs in the input string
before starting the parsing process. See L{I{parseString}<parseString>} for more information
on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
consistent view of the parsed string, the parse location, and line and column
positions within the parsed string.
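        Example (illustrative sketch; assumes C{Word} and C{nums} from this module)::
            # convert matched numeric strings to Python ints as they are parsed
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            integer.parseString("1234")[0]   # -> 1234 (an int, not the string '1234')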
"""
self.parseAction = list(map(_trim_arity, list(fns)))
self.callDuringTry = kwargs.get("callDuringTry", False)
return self
def addParseAction( self, *fns, **kwargs ):
"""Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}."""
self.parseAction += list(map(_trim_arity, list(fns)))
self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
return self
def addCondition(self, *fns, **kwargs):
"""Add a boolean predicate function to expression's list of parse actions. See
L{I{setParseAction}<setParseAction>}. Optional keyword argument C{message} can
be used to define a custom message to be used in the raised exception."""
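        # Illustrative sketch (assumes Word/nums from this module):
        #   year = Word(nums).setParseAction(lambda toks: int(toks[0]))
        #   year.addCondition(lambda toks: toks[0] > 2000,
        #                     message="only years after 2000 are allowed")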
msg = kwargs.get("message") or "failed user-defined condition"
        for fn in fns:
            # bind fn as a default argument; otherwise every generated pa would
            # share the last fn in fns (closure late-binding)
            def pa(s,l,t,fn=_trim_arity(fn)):
                if not bool(fn(s,l,t)):
                    raise ParseException(s,l,msg)
                return t
            self.parseAction.append(pa)
self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
return self
def setFailAction( self, fn ):
"""Define action to perform if parsing fails at this expression.
        Fail action fn is a callable function that takes the arguments
C{fn(s,loc,expr,err)} where:
- s = string being parsed
- loc = location where expression match was attempted and failed
- expr = the parse expression that failed
- err = the exception thrown
The function returns no value. It may throw C{L{ParseFatalException}}
if it is desired to stop parsing immediately."""
self.failAction = fn
return self
def _skipIgnorables( self, instring, loc ):
exprsFound = True
while exprsFound:
exprsFound = False
for e in self.ignoreExprs:
try:
while 1:
loc,dummy = e._parse( instring, loc )
exprsFound = True
except ParseException:
pass
return loc
def preParse( self, instring, loc ):
if self.ignoreExprs:
loc = self._skipIgnorables( instring, loc )
if self.skipWhitespace:
wt = self.whiteChars
instrlen = len(instring)
while loc < instrlen and instring[loc] in wt:
loc += 1
return loc
def parseImpl( self, instring, loc, doActions=True ):
return loc, []
def postParse( self, instring, loc, tokenlist ):
return tokenlist
#~ @profile
def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
debugging = ( self.debug ) #and doActions )
if debugging or self.failAction:
#~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
if (self.debugActions[0] ):
self.debugActions[0]( instring, loc, self )
if callPreParse and self.callPreparse:
preloc = self.preParse( instring, loc )
else:
preloc = loc
tokensStart = preloc
try:
try:
loc,tokens = self.parseImpl( instring, preloc, doActions )
except IndexError:
raise ParseException( instring, len(instring), self.errmsg, self )
except ParseBaseException as err:
#~ print ("Exception raised:", err)
if self.debugActions[2]:
self.debugActions[2]( instring, tokensStart, self, err )
if self.failAction:
self.failAction( instring, tokensStart, self, err )
raise
else:
if callPreParse and self.callPreparse:
preloc = self.preParse( instring, loc )
else:
preloc = loc
tokensStart = preloc
if self.mayIndexError or loc >= len(instring):
try:
loc,tokens = self.parseImpl( instring, preloc, doActions )
except IndexError:
raise ParseException( instring, len(instring), self.errmsg, self )
else:
loc,tokens = self.parseImpl( instring, preloc, doActions )
tokens = self.postParse( instring, loc, tokens )
retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
if self.parseAction and (doActions or self.callDuringTry):
if debugging:
try:
for fn in self.parseAction:
tokens = fn( instring, tokensStart, retTokens )
if tokens is not None:
retTokens = ParseResults( tokens,
self.resultsName,
asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
modal=self.modalResults )
except ParseBaseException as err:
#~ print "Exception raised in user parse action:", err
if (self.debugActions[2] ):
self.debugActions[2]( instring, tokensStart, self, err )
raise
else:
for fn in self.parseAction:
tokens = fn( instring, tokensStart, retTokens )
if tokens is not None:
retTokens = ParseResults( tokens,
self.resultsName,
asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
modal=self.modalResults )
if debugging:
#~ print ("Matched",self,"->",retTokens.asList())
if (self.debugActions[1] ):
self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
return loc, retTokens
def tryParse( self, instring, loc ):
try:
return self._parse( instring, loc, doActions=False )[0]
except ParseFatalException:
raise ParseException( instring, loc, self.errmsg, self)
def canParseNext(self, instring, loc):
try:
self.tryParse(instring, loc)
except (ParseException, IndexError):
return False
else:
return True
# this method gets repeatedly called during backtracking with the same arguments -
# we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
lookup = (self,instring,loc,callPreParse,doActions)
if lookup in ParserElement._exprArgCache:
value = ParserElement._exprArgCache[ lookup ]
if isinstance(value, Exception):
raise value
return (value[0],value[1].copy())
else:
try:
value = self._parseNoCache( instring, loc, doActions, callPreParse )
ParserElement._exprArgCache[ lookup ] = (value[0],value[1].copy())
return value
except ParseBaseException as pe:
pe.__traceback__ = None
ParserElement._exprArgCache[ lookup ] = pe
raise
_parse = _parseNoCache
# argument cache for optimizing repeated calls when backtracking through recursive expressions
_exprArgCache = {}
@staticmethod
def resetCache():
ParserElement._exprArgCache.clear()
_packratEnabled = False
@staticmethod
def enablePackrat():
"""Enables "packrat" parsing, which adds memoizing to the parsing logic.
Repeated parse attempts at the same string location (which happens
often in many complex grammars) can immediately return a cached value,
        instead of re-executing parsing/validating code. Memoizing is done for
both valid results and parsing exceptions.
This speedup may break existing programs that use parse actions that
have side-effects. For this reason, packrat parsing is disabled when
you first import pyparsing. To activate the packrat feature, your
program must call the class method C{ParserElement.enablePackrat()}. If
your program uses C{psyco} to "compile as you go", you must call
C{enablePackrat} before calling C{psyco.full()}. If you do not do this,
Python will crash. For best results, call C{enablePackrat()} immediately
after importing pyparsing.
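        Example (illustrative)::
            import pyparsing
            pyparsing.ParserElement.enablePackrat()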
"""
if not ParserElement._packratEnabled:
ParserElement._packratEnabled = True
ParserElement._parse = ParserElement._parseCache
def parseString( self, instring, parseAll=False ):
"""Execute the parse expression with the given string.
This is the main interface to the client code, once the complete
expression has been built.
If you want the grammar to require that the entire input string be
successfully parsed, then set C{parseAll} to True (equivalent to ending
the grammar with C{L{StringEnd()}}).
Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
in order to report proper column numbers in parse actions.
If the input string contains tabs and
the grammar uses parse actions that use the C{loc} argument to index into the
string being parsed, you can ensure you have a consistent view of the input
string by:
- calling C{parseWithTabs} on your grammar before calling C{parseString}
(see L{I{parseWithTabs}<parseWithTabs>})
- define your parse action using the full C{(s,loc,toks)} signature, and
reference the input string using the parse action's C{s} argument
        - explicitly expand the tabs in your input string before calling
C{parseString}
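        Example (illustrative sketch; assumes C{Word} and C{alphas} from this module)::
            greet = Word(alphas) + ',' + Word(alphas) + '!'
            greet.parseString("Hello, World!")   # -> ['Hello', ',', 'World', '!']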
"""
ParserElement.resetCache()
if not self.streamlined:
self.streamline()
#~ self.saveAsList = True
for e in self.ignoreExprs:
e.streamline()
if not self.keepTabs:
instring = instring.expandtabs()
try:
loc, tokens = self._parse( instring, 0 )
if parseAll:
loc = self.preParse( instring, loc )
se = Empty() + StringEnd()
se._parse( instring, loc )
except ParseBaseException as exc:
if ParserElement.verbose_stacktrace:
raise
else:
# catch and re-raise exception from here, clears out pyparsing internal stack trace
raise exc
else:
return tokens
def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
"""Scan the input string for expression matches. Each match will return the
matching tokens, start location, and end location. May be called with optional
C{maxMatches} argument, to clip scanning after 'n' matches are found. If
C{overlap} is specified, then overlapping matches will be reported.
Note that the start and end locations are reported relative to the string
being parsed. See L{I{parseString}<parseString>} for more information on parsing
strings with embedded tabs."""
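        # Illustrative sketch (assumes Word/nums from this module):
        #   source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
        #   for tokens, start, end in Word(nums).scanString(source):
        #       print(tokens[0], start, end)   # each digit run with its span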
if not self.streamlined:
self.streamline()
for e in self.ignoreExprs:
e.streamline()
if not self.keepTabs:
instring = _ustr(instring).expandtabs()
instrlen = len(instring)
loc = 0
preparseFn = self.preParse
parseFn = self._parse
ParserElement.resetCache()
matches = 0
try:
while loc <= instrlen and matches < maxMatches:
try:
preloc = preparseFn( instring, loc )
nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
except ParseException:
loc = preloc+1
else:
if nextLoc > loc:
matches += 1
yield tokens, preloc, nextLoc
if overlap:
nextloc = preparseFn( instring, loc )
if nextloc > loc:
loc = nextLoc
else:
loc += 1
else:
loc = nextLoc
else:
loc = preloc+1
except ParseBaseException as exc:
if ParserElement.verbose_stacktrace:
raise
else:
# catch and re-raise exception from here, clears out pyparsing internal stack trace
raise exc
def transformString( self, instring ):
"""Extension to C{L{scanString}}, to modify matching text with modified tokens that may
be returned from a parse action. To use C{transformString}, define a grammar and
attach a parse action to it that modifies the returned token list.
Invoking C{transformString()} on a target string will then scan for matches,
and replace the matched text patterns according to the logic in the parse
action. C{transformString()} returns the resulting transformed string."""
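        # Illustrative sketch (assumes Word/nums from this module):
        #   num = Word(nums).setParseAction(lambda toks: "<num>")
        #   num.transformString("pay me 100 dollars")   # -> 'pay me <num> dollars'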
out = []
lastE = 0
# force preservation of <TAB>s, to minimize unwanted transformation of string, and to
# keep string locs straight between transformString and scanString
self.keepTabs = True
try:
for t,s,e in self.scanString( instring ):
out.append( instring[lastE:s] )
if t:
if isinstance(t,ParseResults):
out += t.asList()
elif isinstance(t,list):
out += t
else:
out.append(t)
lastE = e
out.append(instring[lastE:])
out = [o for o in out if o]
return "".join(map(_ustr,_flatten(out)))
except ParseBaseException as exc:
if ParserElement.verbose_stacktrace:
raise
else:
# catch and re-raise exception from here, clears out pyparsing internal stack trace
raise exc
def searchString( self, instring, maxMatches=_MAX_INT ):
"""Another extension to C{L{scanString}}, simplifying the access to the tokens found
to match the given parse expression. May be called with optional
C{maxMatches} argument, to clip searching after 'n' matches are found.
"""
try:
return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
except ParseBaseException as exc:
if ParserElement.verbose_stacktrace:
raise
else:
# catch and re-raise exception from here, clears out pyparsing internal stack trace
raise exc
def __add__(self, other ):
"""Implementation of + operator - returns C{L{And}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return And( [ self, other ] )
def __radd__(self, other ):
"""Implementation of + operator when left operand is not a C{L{ParserElement}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return other + self
def __sub__(self, other):
"""Implementation of - operator, returns C{L{And}} with error stop"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return And( [ self, And._ErrorStop(), other ] )
def __rsub__(self, other ):
"""Implementation of - operator when left operand is not a C{L{ParserElement}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return other - self
def __mul__(self,other):
"""Implementation of * operator, allows use of C{expr * 3} in place of
        C{expr + expr + expr}. Expressions may also be multiplied by a 2-integer
tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples
may also include C{None} as in:
- C{expr*(n,None)} or C{expr*(n,)} is equivalent
to C{expr*n + L{ZeroOrMore}(expr)}
(read as "at least n instances of C{expr}")
- C{expr*(None,n)} is equivalent to C{expr*(0,n)}
(read as "0 to n instances of C{expr}")
- C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
- C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
Note that C{expr*(None,n)} does not raise an exception if
more than n exprs exist in the input stream; that is,
C{expr*(None,n)} does not enforce a maximum number of expr
occurrences. If this behavior is desired, then write
C{expr*(None,n) + ~expr}
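        Example (illustrative sketch; assumes C{Word} and C{nums} from this module)::
            Word(nums) * 3        # same as Word(nums) + Word(nums) + Word(nums)
            Word(nums) * (1, 3)   # match 1 to 3 integers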
"""
if isinstance(other,int):
minElements, optElements = other,0
elif isinstance(other,tuple):
other = (other + (None, None))[:2]
if other[0] is None:
other = (0, other[1])
if isinstance(other[0],int) and other[1] is None:
if other[0] == 0:
return ZeroOrMore(self)
if other[0] == 1:
return OneOrMore(self)
else:
return self*other[0] + ZeroOrMore(self)
elif isinstance(other[0],int) and isinstance(other[1],int):
minElements, optElements = other
optElements -= minElements
else:
raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
else:
raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
if minElements < 0:
raise ValueError("cannot multiply ParserElement by negative value")
if optElements < 0:
raise ValueError("second tuple value must be greater or equal to first tuple value")
if minElements == optElements == 0:
raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
if (optElements):
def makeOptionalList(n):
if n>1:
return Optional(self + makeOptionalList(n-1))
else:
return Optional(self)
if minElements:
if minElements == 1:
ret = self + makeOptionalList(optElements)
else:
ret = And([self]*minElements) + makeOptionalList(optElements)
else:
ret = makeOptionalList(optElements)
else:
if minElements == 1:
ret = self
else:
ret = And([self]*minElements)
return ret
def __rmul__(self, other):
return self.__mul__(other)
def __or__(self, other ):
"""Implementation of | operator - returns C{L{MatchFirst}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return MatchFirst( [ self, other ] )
def __ror__(self, other ):
"""Implementation of | operator when left operand is not a C{L{ParserElement}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return other | self
def __xor__(self, other ):
"""Implementation of ^ operator - returns C{L{Or}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return Or( [ self, other ] )
def __rxor__(self, other ):
"""Implementation of ^ operator when left operand is not a C{L{ParserElement}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return other ^ self
def __and__(self, other ):
"""Implementation of & operator - returns C{L{Each}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return Each( [ self, other ] )
def __rand__(self, other ):
"""Implementation of & operator when left operand is not a C{L{ParserElement}}"""
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return other & self
def __invert__( self ):
"""Implementation of ~ operator - returns C{L{NotAny}}"""
return NotAny( self )
def __call__(self, name=None):
"""Shortcut for C{L{setResultsName}}, with C{listAllMatches=default}::
userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
could be written as::
userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")
If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
passed as C{True}.
If C{name} is omitted, same as calling C{L{copy}}.
"""
if name is not None:
return self.setResultsName(name)
else:
return self.copy()
def suppress( self ):
"""Suppresses the output of this C{ParserElement}; useful to keep punctuation from
cluttering up returned output.
"""
return Suppress( self )
def leaveWhitespace( self ):
"""Disables the skipping of whitespace before matching the characters in the
C{ParserElement}'s defined pattern. This is normally only used internally by
the pyparsing module, but may be needed in some whitespace-sensitive grammars.
"""
self.skipWhitespace = False
return self
def setWhitespaceChars( self, chars ):
"""Overrides the default whitespace chars
"""
self.skipWhitespace = True
self.whiteChars = chars
self.copyDefaultWhiteChars = False
return self
def parseWithTabs( self ):
"""Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
Must be called before C{parseString} when the input grammar contains elements that
match C{<TAB>} characters."""
self.keepTabs = True
return self
def ignore( self, other ):
"""Define expression to be ignored (e.g., comments) while doing pattern
matching; may be called repeatedly, to define multiple comment or other
ignorable patterns.
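        Example (illustrative sketch; C{OneOrMore} and C{cStyleComment} are defined
        elsewhere in this module)::
            patt = OneOrMore(Word(alphas))
            patt.ignore(cStyleComment)
            patt.parseString('ablaj /* comment */ lskjd')   # -> ['ablaj', 'lskjd']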
"""
if isinstance(other, basestring):
other = Suppress(other)
if isinstance( other, Suppress ):
if other not in self.ignoreExprs:
self.ignoreExprs.append(other)
else:
self.ignoreExprs.append( Suppress( other.copy() ) )
return self
def setDebugActions( self, startAction, successAction, exceptionAction ):
"""Enable display of debugging messages while doing pattern matching."""
self.debugActions = (startAction or _defaultStartDebugAction,
successAction or _defaultSuccessDebugAction,
exceptionAction or _defaultExceptionDebugAction)
self.debug = True
return self
def setDebug( self, flag=True ):
"""Enable display of debugging messages while doing pattern matching.
Set C{flag} to True to enable, False to disable."""
if flag:
self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
else:
self.debug = False
return self
def __str__( self ):
return self.name
def __repr__( self ):
return _ustr(self)
def streamline( self ):
self.streamlined = True
self.strRepr = None
return self
def checkRecursion( self, parseElementList ):
pass
def validate( self, validateTrace=[] ):
"""Check defined expressions for valid structure, check for infinite recursive definitions."""
self.checkRecursion( [] )
def parseFile( self, file_or_filename, parseAll=False ):
"""Execute the parse expression on the given file or filename.
If a filename is specified (instead of a file object),
the entire file is opened, read, and closed before parsing.
"""
try:
file_contents = file_or_filename.read()
except AttributeError:
f = open(file_or_filename, "r")
file_contents = f.read()
f.close()
try:
return self.parseString(file_contents, parseAll)
except ParseBaseException as exc:
if ParserElement.verbose_stacktrace:
raise
else:
# catch and re-raise exception from here, clears out pyparsing internal stack trace
raise exc
def __eq__(self,other):
if isinstance(other, ParserElement):
return self is other or self.__dict__ == other.__dict__
elif isinstance(other, basestring):
try:
self.parseString(_ustr(other), parseAll=True)
return True
except ParseBaseException:
return False
else:
return super(ParserElement,self)==other
def __ne__(self,other):
return not (self == other)
def __hash__(self):
return hash(id(self))
def __req__(self,other):
return self == other
def __rne__(self,other):
return not (self == other)
def runTests(self, tests, parseAll=False):
"""Execute the parse expression on a series of test strings, showing each
test, the parsed results or where the parse failed. Quick and easy way to
run a parse expression against a list of sample strings.
Parameters:
- tests - a list of separate test strings, or a multiline string of test strings
- parseAll - (default=False) - flag to pass to C{L{parseString}} when running tests
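        Example (illustrative sketch; assumes C{Word} and C{nums} from this module)::
            # the last test fails with parseAll=True, and the failure is printed
            Word(nums).runTests("100\\n200\\nabc", parseAll=True)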
"""
if isinstance(tests, basestring):
tests = map(str.strip, tests.splitlines())
for t in tests:
out = [t]
try:
out.append(self.parseString(t, parseAll=parseAll).dump())
except ParseException as pe:
if '\n' in t:
out.append(line(pe.loc, t))
out.append(' '*(col(pe.loc,t)-1) + '^')
else:
out.append(' '*pe.loc + '^')
out.append(str(pe))
out.append('')
print('\n'.join(out))
class Token(ParserElement):
"""Abstract C{ParserElement} subclass, for defining atomic matching patterns."""
def __init__( self ):
super(Token,self).__init__( savelist=False )
class Empty(Token):
"""An empty token, will always match."""
def __init__( self ):
super(Empty,self).__init__()
self.name = "Empty"
self.mayReturnEmpty = True
self.mayIndexError = False
class NoMatch(Token):
"""A token that will never match."""
def __init__( self ):
super(NoMatch,self).__init__()
self.name = "NoMatch"
self.mayReturnEmpty = True
self.mayIndexError = False
self.errmsg = "Unmatchable token"
def parseImpl( self, instring, loc, doActions=True ):
raise ParseException(instring, loc, self.errmsg, self)
class Literal(Token):
"""Token to exactly match a specified string."""
def __init__( self, matchString ):
super(Literal,self).__init__()
self.match = matchString
self.matchLen = len(matchString)
try:
self.firstMatchChar = matchString[0]
except IndexError:
warnings.warn("null string passed to Literal; use Empty() instead",
SyntaxWarning, stacklevel=2)
self.__class__ = Empty
self.name = '"%s"' % _ustr(self.match)
self.errmsg = "Expected " + self.name
self.mayReturnEmpty = False
self.mayIndexError = False
# Performance tuning: this routine gets called a *lot*
# if this is a single character match string and the first character matches,
# short-circuit as quickly as possible, and avoid calling startswith
#~ @profile
def parseImpl( self, instring, loc, doActions=True ):
if (instring[loc] == self.firstMatchChar and
(self.matchLen==1 or instring.startswith(self.match,loc)) ):
return loc+self.matchLen, self.match
raise ParseException(instring, loc, self.errmsg, self)
_L = Literal
ParserElement.literalStringClass = Literal
class Keyword(Token):
"""Token to exactly match a specified string as a keyword, that is, it must be
immediately followed by a non-keyword character. Compare with C{L{Literal}}::
Literal("if") will match the leading C{'if'} in C{'ifAndOnlyIf'}.
Keyword("if") will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
Accepts two optional constructor arguments in addition to the keyword string:
C{identChars} is a string of characters that would be valid identifier characters,
defaulting to all alphanumerics + "_" and "$"; C{caseless} allows case-insensitive
matching, default is C{False}.
"""
DEFAULT_KEYWORD_CHARS = alphanums+"_$"
def __init__( self, matchString, identChars=DEFAULT_KEYWORD_CHARS, caseless=False ):
super(Keyword,self).__init__()
self.match = matchString
self.matchLen = len(matchString)
try:
self.firstMatchChar = matchString[0]
except IndexError:
warnings.warn("null string passed to Keyword; use Empty() instead",
SyntaxWarning, stacklevel=2)
self.name = '"%s"' % self.match
self.errmsg = "Expected " + self.name
self.mayReturnEmpty = False
self.mayIndexError = False
self.caseless = caseless
if caseless:
self.caselessmatch = matchString.upper()
identChars = identChars.upper()
self.identChars = set(identChars)
def parseImpl( self, instring, loc, doActions=True ):
if self.caseless:
if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
(loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
(loc == 0 or instring[loc-1].upper() not in self.identChars) ):
return loc+self.matchLen, self.match
else:
if (instring[loc] == self.firstMatchChar and
(self.matchLen==1 or instring.startswith(self.match,loc)) and
(loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
(loc == 0 or instring[loc-1] not in self.identChars) ):
return loc+self.matchLen, self.match
raise ParseException(instring, loc, self.errmsg, self)
def copy(self):
c = super(Keyword,self).copy()
c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
return c
@staticmethod
def setDefaultKeywordChars( chars ):
"""Overrides the default Keyword chars
"""
Keyword.DEFAULT_KEYWORD_CHARS = chars
class CaselessLiteral(Literal):
"""Token to match a specified string, ignoring case of letters.
Note: the matched results will always be in the case of the given
match string, NOT the case of the input text.
"""
def __init__( self, matchString ):
super(CaselessLiteral,self).__init__( matchString.upper() )
# Preserve the defining literal.
self.returnString = matchString
self.name = "'%s'" % self.returnString
self.errmsg = "Expected " + self.name
def parseImpl( self, instring, loc, doActions=True ):
if instring[ loc:loc+self.matchLen ].upper() == self.match:
return loc+self.matchLen, self.returnString
raise ParseException(instring, loc, self.errmsg, self)
class CaselessKeyword(Keyword):
def __init__( self, matchString, identChars=Keyword.DEFAULT_KEYWORD_CHARS ):
super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )
def parseImpl( self, instring, loc, doActions=True ):
if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
(loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
return loc+self.matchLen, self.match
raise ParseException(instring, loc, self.errmsg, self)
class Word(Token):
"""Token for matching words composed of allowed character sets.
Defined with string containing all allowed initial characters,
an optional string containing allowed body characters (if omitted,
defaults to the initial character set), and an optional minimum,
maximum, and/or exact length. The default value for C{min} is 1 (a
minimum value < 1 is not valid); the default values for C{max} and C{exact}
are 0, meaning no maximum or exact length restriction. An optional
C{excludeChars} parameter can list characters that might be found in
the input C{bodyChars} string; useful to define a word of all printables
except for one or two characters, for instance.
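    Example (illustrative sketch; C{alphas}, C{nums}, and C{printables} are defined
    elsewhere in this module)::
        Word(alphas)                         # one or more letters
        Word(nums, exact=4)                  # exactly 4 digits
        Word(printables, excludeChars=',')   # any printable characters except commas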
"""
def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
super(Word,self).__init__()
if excludeChars:
initChars = ''.join(c for c in initChars if c not in excludeChars)
if bodyChars:
bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
self.initCharsOrig = initChars
self.initChars = set(initChars)
if bodyChars :
self.bodyCharsOrig = bodyChars
self.bodyChars = set(bodyChars)
else:
self.bodyCharsOrig = initChars
self.bodyChars = set(initChars)
self.maxSpecified = max > 0
if min < 1:
raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
self.minLen = min
if max > 0:
self.maxLen = max
else:
self.maxLen = _MAX_INT
if exact > 0:
self.maxLen = exact
self.minLen = exact
self.name = _ustr(self)
self.errmsg = "Expected " + self.name
self.mayIndexError = False
self.asKeyword = asKeyword
if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
if self.bodyCharsOrig == self.initCharsOrig:
self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
elif len(self.initCharsOrig) == 1:
self.reString = "%s[%s]*" % \
(re.escape(self.initCharsOrig),
_escapeRegexRangeChars(self.bodyCharsOrig),)
else:
self.reString = "[%s][%s]*" % \
(_escapeRegexRangeChars(self.initCharsOrig),
_escapeRegexRangeChars(self.bodyCharsOrig),)
if self.asKeyword:
self.reString = r"\b"+self.reString+r"\b"
try:
self.re = re.compile( self.reString )
            except Exception:
self.re = None
def parseImpl( self, instring, loc, doActions=True ):
if self.re:
result = self.re.match(instring,loc)
if not result:
raise ParseException(instring, loc, self.errmsg, self)
loc = result.end()
return loc, result.group()
if not(instring[ loc ] in self.initChars):
raise ParseException(instring, loc, self.errmsg, self)
start = loc
loc += 1
instrlen = len(instring)
bodychars = self.bodyChars
maxloc = start + self.maxLen
maxloc = min( maxloc, instrlen )
while loc < maxloc and instring[loc] in bodychars:
loc += 1
throwException = False
if loc - start < self.minLen:
throwException = True
if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
throwException = True
if self.asKeyword:
if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
throwException = True
if throwException:
raise ParseException(instring, loc, self.errmsg, self)
return loc, instring[start:loc]
def __str__( self ):
try:
return super(Word,self).__str__()
        except Exception:
pass
if self.strRepr is None:
def charsAsStr(s):
if len(s)>4:
return s[:4]+"..."
else:
return s
if ( self.initCharsOrig != self.bodyCharsOrig ):
self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
else:
self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
return self.strRepr
class Regex(Token):
"""Token for matching strings that match a given regular expression.
    Defined with a string specifying the regular expression, in a form recognized by the built-in Python re module.
    """
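    # Illustrative sketch (hypothetical patterns):
    #   realnum = Regex(r"[+-]?\d+\.\d*")
    #   date = Regex(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})")
    #   # named groups in the pattern become results names on the returned tokens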
"""
compiledREtype = type(re.compile("[A-Z]"))
def __init__( self, pattern, flags=0):
"""The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
super(Regex,self).__init__()
if isinstance(pattern, basestring):
if not pattern:
warnings.warn("null string passed to Regex; use Empty() instead",
SyntaxWarning, stacklevel=2)
self.pattern = pattern
self.flags = flags
try:
self.re = re.compile(self.pattern, self.flags)
self.reString = self.pattern
except sre_constants.error:
warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
SyntaxWarning, stacklevel=2)
raise
elif isinstance(pattern, Regex.compiledREtype):
self.re = pattern
self.pattern = \
self.reString = str(pattern)
self.flags = flags
else:
raise ValueError("Regex may only be constructed with a string or a compiled RE object")
self.name = _ustr(self)
self.errmsg = "Expected " + self.name
self.mayIndexError = False
self.mayReturnEmpty = True
def parseImpl( self, instring, loc, doActions=True ):
result = self.re.match(instring,loc)
if not result:
raise ParseException(instring, loc, self.errmsg, self)
loc = result.end()
d = result.groupdict()
ret = ParseResults(result.group())
if d:
for k in d:
ret[k] = d[k]
return loc,ret
def __str__( self ):
try:
return super(Regex,self).__str__()
        except Exception:
pass
if self.strRepr is None:
self.strRepr = "Re:(%s)" % repr(self.pattern)
return self.strRepr
class QuotedString(Token):
"""Token for matching strings that are delimited by quoting characters.
"""
def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None):
"""
Defined with the following parameters:
- quoteChar - string of one or more characters defining the quote delimiting string
- escChar - character to escape quotes, typically backslash (default=None)
- escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=None)
- multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
- unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
- endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
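        Example (illustrative)::
            QuotedString('"').parseString('"hello"')[0]                       # -> 'hello'
            QuotedString('{{', endQuoteChar='}}').parseString('{{expr}}')[0]  # -> 'expr'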
"""
super(QuotedString,self).__init__()
        # remove white space from quote chars - won't work anyway
quoteChar = quoteChar.strip()
if not quoteChar:
warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
raise SyntaxError()
if endQuoteChar is None:
endQuoteChar = quoteChar
else:
endQuoteChar = endQuoteChar.strip()
if not endQuoteChar:
warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
raise SyntaxError()
self.quoteChar = quoteChar
self.quoteCharLen = len(quoteChar)
self.firstQuoteChar = quoteChar[0]
self.endQuoteChar = endQuoteChar
self.endQuoteCharLen = len(endQuoteChar)
self.escChar = escChar
self.escQuote = escQuote
self.unquoteResults = unquoteResults
if multiline:
self.flags = re.MULTILINE | re.DOTALL
self.pattern = r'%s(?:[^%s%s]' % \
( re.escape(self.quoteChar),
_escapeRegexRangeChars(self.endQuoteChar[0]),
(escChar is not None and _escapeRegexRangeChars(escChar) or '') )
else:
self.flags = 0
self.pattern = r'%s(?:[^%s\n\r%s]' % \
( re.escape(self.quoteChar),
_escapeRegexRangeChars(self.endQuoteChar[0]),
(escChar is not None and _escapeRegexRangeChars(escChar) or '') )
if len(self.endQuoteChar) > 1:
self.pattern += (
'|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
_escapeRegexRangeChars(self.endQuoteChar[i]))
for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
)
if escQuote:
self.pattern += (r'|(?:%s)' % re.escape(escQuote))
if escChar:
self.pattern += (r'|(?:%s.)' % re.escape(escChar))
self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
try:
self.re = re.compile(self.pattern, self.flags)
self.reString = self.pattern
except sre_constants.error:
warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
SyntaxWarning, stacklevel=2)
raise
self.name = _ustr(self)
self.errmsg = "Expected " + self.name
self.mayIndexError = False
self.mayReturnEmpty = True
def parseImpl( self, instring, loc, doActions=True ):
result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
if not result:
raise ParseException(instring, loc, self.errmsg, self)
loc = result.end()
ret = result.group()
if self.unquoteResults:
# strip off quotes
ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
if isinstance(ret,basestring):
# replace escaped characters
if self.escChar:
                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
# replace escaped quotes
if self.escQuote:
ret = ret.replace(self.escQuote, self.endQuoteChar)
return loc, ret
def __str__( self ):
try:
return super(QuotedString,self).__str__()
        except Exception:
pass
if self.strRepr is None:
self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
return self.strRepr
class CharsNotIn(Token):
"""Token for matching words composed of characters *not* in a given set.
Defined with string containing all disallowed characters, and an optional
minimum, maximum, and/or exact length. The default value for C{min} is 1 (a
minimum value < 1 is not valid); the default values for C{max} and C{exact}
are 0, meaning no maximum or exact length restriction.
"""
def __init__( self, notChars, min=1, max=0, exact=0 ):
super(CharsNotIn,self).__init__()
self.skipWhitespace = False
self.notChars = notChars
if min < 1:
raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
self.minLen = min
if max > 0:
self.maxLen = max
else:
self.maxLen = _MAX_INT
if exact > 0:
self.maxLen = exact
self.minLen = exact
self.name = _ustr(self)
self.errmsg = "Expected " + self.name
self.mayReturnEmpty = ( self.minLen == 0 )
self.mayIndexError = False
def parseImpl( self, instring, loc, doActions=True ):
if instring[loc] in self.notChars:
raise ParseException(instring, loc, self.errmsg, self)
start = loc
loc += 1
notchars = self.notChars
maxlen = min( start+self.maxLen, len(instring) )
while loc < maxlen and \
(instring[loc] not in notchars):
loc += 1
if loc - start < self.minLen:
raise ParseException(instring, loc, self.errmsg, self)
return loc, instring[start:loc]
def __str__( self ):
try:
return super(CharsNotIn, self).__str__()
        except Exception:
pass
if self.strRepr is None:
if len(self.notChars) > 4:
self.strRepr = "!W:(%s...)" % self.notChars[:4]
else:
self.strRepr = "!W:(%s)" % self.notChars
return self.strRepr
class White(Token):
"""Special matching class for matching whitespace. Normally, whitespace is ignored
by pyparsing grammars. This class is included when some whitespace structures
are significant. Define with a string containing the whitespace characters to be
matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments,
as defined for the C{L{Word}} class."""
whiteStrs = {
" " : "<SPC>",
"\t": "<TAB>",
"\n": "<LF>",
"\r": "<CR>",
"\f": "<FF>",
}
def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
super(White,self).__init__()
self.matchWhite = ws
self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
#~ self.leaveWhitespace()
self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
self.mayReturnEmpty = True
self.errmsg = "Expected " + self.name
self.minLen = min
if max > 0:
self.maxLen = max
else:
self.maxLen = _MAX_INT
if exact > 0:
self.maxLen = exact
self.minLen = exact
def parseImpl( self, instring, loc, doActions=True ):
if not(instring[ loc ] in self.matchWhite):
raise ParseException(instring, loc, self.errmsg, self)
start = loc
loc += 1
maxloc = start + self.maxLen
maxloc = min( maxloc, len(instring) )
while loc < maxloc and instring[loc] in self.matchWhite:
loc += 1
if loc - start < self.minLen:
raise ParseException(instring, loc, self.errmsg, self)
return loc, instring[start:loc]
class _PositionToken(Token):
def __init__( self ):
super(_PositionToken,self).__init__()
self.name=self.__class__.__name__
self.mayReturnEmpty = True
self.mayIndexError = False
class GoToColumn(_PositionToken):
"""Token to advance to a specific column of input text; useful for tabular report scraping."""
def __init__( self, colno ):
super(GoToColumn,self).__init__()
self.col = colno
def preParse( self, instring, loc ):
if col(loc,instring) != self.col:
instrlen = len(instring)
if self.ignoreExprs:
loc = self._skipIgnorables( instring, loc )
while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col:
loc += 1
return loc
def parseImpl( self, instring, loc, doActions=True ):
thiscol = col( loc, instring )
if thiscol > self.col:
raise ParseException( instring, loc, "Text not in expected column", self )
newloc = loc + self.col - thiscol
ret = instring[ loc: newloc ]
return newloc, ret
class LineStart(_PositionToken):
"""Matches if current position is at the beginning of a line within the parse string"""
def __init__( self ):
super(LineStart,self).__init__()
self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
self.errmsg = "Expected start of line"
def preParse( self, instring, loc ):
preloc = super(LineStart,self).preParse(instring,loc)
if instring[preloc] == "\n":
loc += 1
return loc
def parseImpl( self, instring, loc, doActions=True ):
if not( loc==0 or
(loc == self.preParse( instring, 0 )) or
(instring[loc-1] == "\n") ): #col(loc, instring) != 1:
raise ParseException(instring, loc, self.errmsg, self)
return loc, []
class LineEnd(_PositionToken):
"""Matches if current position is at the end of a line within the parse string"""
def __init__( self ):
super(LineEnd,self).__init__()
self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
self.errmsg = "Expected end of line"
def parseImpl( self, instring, loc, doActions=True ):
if loc<len(instring):
if instring[loc] == "\n":
return loc+1, "\n"
else:
raise ParseException(instring, loc, self.errmsg, self)
elif loc == len(instring):
return loc+1, []
else:
raise ParseException(instring, loc, self.errmsg, self)
class StringStart(_PositionToken):
"""Matches if current position is at the beginning of the parse string"""
def __init__( self ):
super(StringStart,self).__init__()
self.errmsg = "Expected start of text"
def parseImpl( self, instring, loc, doActions=True ):
if loc != 0:
# see if entire string up to here is just whitespace and ignoreables
if loc != self.preParse( instring, 0 ):
raise ParseException(instring, loc, self.errmsg, self)
return loc, []
class StringEnd(_PositionToken):
"""Matches if current position is at the end of the parse string"""
def __init__( self ):
super(StringEnd,self).__init__()
self.errmsg = "Expected end of text"
def parseImpl( self, instring, loc, doActions=True ):
if loc < len(instring):
raise ParseException(instring, loc, self.errmsg, self)
elif loc == len(instring):
return loc+1, []
elif loc > len(instring):
return loc, []
else:
raise ParseException(instring, loc, self.errmsg, self)
class WordStart(_PositionToken):
"""Matches if the current position is at the beginning of a Word, and
is not preceded by any character in a given set of C{wordChars}
(default=C{printables}). To emulate the C{\\b} behavior of regular expressions,
use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
the string being parsed, or at the beginning of a line.
"""
def __init__(self, wordChars = printables):
super(WordStart,self).__init__()
self.wordChars = set(wordChars)
self.errmsg = "Not at the start of a word"
def parseImpl(self, instring, loc, doActions=True ):
if loc != 0:
if (instring[loc-1] in self.wordChars or
instring[loc] not in self.wordChars):
raise ParseException(instring, loc, self.errmsg, self)
return loc, []
class WordEnd(_PositionToken):
"""Matches if the current position is at the end of a Word, and
is not followed by any character in a given set of C{wordChars}
(default=C{printables}). To emulate the C{\\b} behavior of regular expressions,
use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
the string being parsed, or at the end of a line.
"""
def __init__(self, wordChars = printables):
super(WordEnd,self).__init__()
self.wordChars = set(wordChars)
self.skipWhitespace = False
self.errmsg = "Not at the end of a word"
def parseImpl(self, instring, loc, doActions=True ):
instrlen = len(instring)
if instrlen>0 and loc<instrlen:
if (instring[loc] in self.wordChars or
instring[loc-1] not in self.wordChars):
raise ParseException(instring, loc, self.errmsg, self)
return loc, []
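# Usage sketch (editor's illustration): match "ball" only as a whole word.
#   ball = WordStart(alphas) + Literal("ball") + WordEnd(alphas)
#   print(ball.searchString("football ball balls"))   # -> [['ball']]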
class ParseExpression(ParserElement):
"""Abstract subclass of ParserElement, for combining and post-processing parsed tokens."""
def __init__( self, exprs, savelist = False ):
super(ParseExpression,self).__init__(savelist)
if isinstance( exprs, _generatorType ):
exprs = list(exprs)
if isinstance( exprs, basestring ):
self.exprs = [ Literal( exprs ) ]
elif isinstance( exprs, collections.Sequence ):
# if sequence of strings provided, wrap with Literal
if all(isinstance(expr, basestring) for expr in exprs):
exprs = map(Literal, exprs)
self.exprs = list(exprs)
else:
try:
self.exprs = list( exprs )
except TypeError:
self.exprs = [ exprs ]
self.callPreparse = False
def __getitem__( self, i ):
return self.exprs[i]
def append( self, other ):
self.exprs.append( other )
self.strRepr = None
return self
def leaveWhitespace( self ):
"""Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
all contained expressions."""
self.skipWhitespace = False
self.exprs = [ e.copy() for e in self.exprs ]
for e in self.exprs:
e.leaveWhitespace()
return self
def ignore( self, other ):
if isinstance( other, Suppress ):
if other not in self.ignoreExprs:
super( ParseExpression, self).ignore( other )
for e in self.exprs:
e.ignore( self.ignoreExprs[-1] )
else:
super( ParseExpression, self).ignore( other )
for e in self.exprs:
e.ignore( self.ignoreExprs[-1] )
return self
def __str__( self ):
try:
return super(ParseExpression,self).__str__()
except Exception:
pass
if self.strRepr is None:
self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
return self.strRepr
def streamline( self ):
super(ParseExpression,self).streamline()
for e in self.exprs:
e.streamline()
# collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
# but only if there are no parse actions or resultsNames on the nested And's
# (likewise for Or's and MatchFirst's)
if ( len(self.exprs) == 2 ):
other = self.exprs[0]
if ( isinstance( other, self.__class__ ) and
not(other.parseAction) and
other.resultsName is None and
not other.debug ):
self.exprs = other.exprs[:] + [ self.exprs[1] ]
self.strRepr = None
self.mayReturnEmpty |= other.mayReturnEmpty
self.mayIndexError |= other.mayIndexError
other = self.exprs[-1]
if ( isinstance( other, self.__class__ ) and
not(other.parseAction) and
other.resultsName is None and
not other.debug ):
self.exprs = self.exprs[:-1] + other.exprs[:]
self.strRepr = None
self.mayReturnEmpty |= other.mayReturnEmpty
self.mayIndexError |= other.mayIndexError
self.errmsg = "Expected " + _ustr(self)
return self
def setResultsName( self, name, listAllMatches=False ):
ret = super(ParseExpression,self).setResultsName(name,listAllMatches)
return ret
def validate( self, validateTrace=[] ):
tmp = validateTrace[:]+[self]
for e in self.exprs:
e.validate(tmp)
self.checkRecursion( [] )
def copy(self):
ret = super(ParseExpression,self).copy()
ret.exprs = [e.copy() for e in self.exprs]
return ret
class And(ParseExpression):
"""Requires all given C{ParseExpression}s to be found in the given order.
Expressions may be separated by whitespace.
May be constructed using the C{'+'} operator.
"""
class _ErrorStop(Empty):
def __init__(self, *args, **kwargs):
super(And._ErrorStop,self).__init__(*args, **kwargs)
self.name = '-'
self.leaveWhitespace()
def __init__( self, exprs, savelist = True ):
super(And,self).__init__(exprs, savelist)
self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
self.setWhitespaceChars( self.exprs[0].whiteChars )
self.skipWhitespace = self.exprs[0].skipWhitespace
self.callPreparse = True
def parseImpl( self, instring, loc, doActions=True ):
# pass False as last arg to _parse for first element, since we already
# pre-parsed the string as part of our And pre-parsing
loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
errorStop = False
for e in self.exprs[1:]:
if isinstance(e, And._ErrorStop):
errorStop = True
continue
if errorStop:
try:
loc, exprtokens = e._parse( instring, loc, doActions )
except ParseSyntaxException:
raise
except ParseBaseException as pe:
pe.__traceback__ = None
raise ParseSyntaxException(pe)
except IndexError:
raise ParseSyntaxException( ParseException(instring, len(instring), self.errmsg, self) )
else:
loc, exprtokens = e._parse( instring, loc, doActions )
if exprtokens or exprtokens.haskeys():
resultlist += exprtokens
return loc, resultlist
def __iadd__(self, other ):
if isinstance( other, basestring ):
other = Literal( other )
return self.append( other ) #And( [ self, other ] )
def checkRecursion( self, parseElementList ):
subRecCheckList = parseElementList[:] + [ self ]
for e in self.exprs:
e.checkRecursion( subRecCheckList )
if not e.mayReturnEmpty:
break
def __str__( self ):
if hasattr(self,"name"):
return self.name
if self.strRepr is None:
self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"
return self.strRepr
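# Usage sketch (editor's illustration): '+' builds an And of successive expressions.
#   greet = Word(alphas) + "," + Word(alphas) + "!"
#   print(greet.parseString("Hello, World!"))   # -> ['Hello', ',', 'World', '!']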
class Or(ParseExpression):
"""Requires that at least one C{ParseExpression} is found.
If two expressions match, the expression that matches the longest string will be used.
May be constructed using the C{'^'} operator.
"""
def __init__( self, exprs, savelist = False ):
super(Or,self).__init__(exprs, savelist)
if self.exprs:
self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
else:
self.mayReturnEmpty = True
def parseImpl( self, instring, loc, doActions=True ):
maxExcLoc = -1
maxException = None
matches = []
for e in self.exprs:
try:
loc2 = e.tryParse( instring, loc )
except ParseException as err:
err.__traceback__ = None
if err.loc > maxExcLoc:
maxException = err
maxExcLoc = err.loc
except IndexError:
if len(instring) > maxExcLoc:
maxException = ParseException(instring,len(instring),e.errmsg,self)
maxExcLoc = len(instring)
else:
# save match among all matches, to retry longest to shortest
matches.append((loc2, e))
if matches:
matches.sort(key=lambda x: -x[0])
for _,e in matches:
try:
return e._parse( instring, loc, doActions )
except ParseException as err:
err.__traceback__ = None
if err.loc > maxExcLoc:
maxException = err
maxExcLoc = err.loc
if maxException is not None:
maxException.msg = self.errmsg
raise maxException
else:
raise ParseException(instring, loc, "no defined alternatives to match", self)
def __ixor__(self, other ):
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
return self.append( other ) #Or( [ self, other ] )
def __str__( self ):
if hasattr(self,"name"):
return self.name
if self.strRepr is None:
self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
return self.strRepr
def checkRecursion( self, parseElementList ):
subRecCheckList = parseElementList[:] + [ self ]
for e in self.exprs:
e.checkRecursion( subRecCheckList )
class MatchFirst(ParseExpression):
"""Requires that at least one C{ParseExpression} is found.
If two expressions match, the first one listed is the one that will match.
May be constructed using the C{'|'} operator.
"""
def __init__( self, exprs, savelist = False ):
super(MatchFirst,self).__init__(exprs, savelist)
if self.exprs:
self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
else:
self.mayReturnEmpty = True
def parseImpl( self, instring, loc, doActions=True ):
maxExcLoc = -1
maxException = None
for e in self.exprs:
try:
ret = e._parse( instring, loc, doActions )
return ret
except ParseException as err:
if err.loc > maxExcLoc:
maxException = err
maxExcLoc = err.loc
except IndexError:
if len(instring) > maxExcLoc:
maxException = ParseException(instring,len(instring),e.errmsg,self)
maxExcLoc = len(instring)
# only got here if no expression matched, raise exception for match that made it the furthest
else:
if maxException is not None:
maxException.msg = self.errmsg
raise maxException
else:
raise ParseException(instring, loc, "no defined alternatives to match", self)
def __ior__(self, other ):
if isinstance( other, basestring ):
other = ParserElement.literalStringClass( other )
return self.append( other ) #MatchFirst( [ self, other ] )
def __str__( self ):
if hasattr(self,"name"):
return self.name
if self.strRepr is None:
self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
return self.strRepr
def checkRecursion( self, parseElementList ):
subRecCheckList = parseElementList[:] + [ self ]
for e in self.exprs:
e.checkRecursion( subRecCheckList )
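# Usage sketch (editor's illustration): '^' (Or) picks the longest match,
# while '|' (MatchFirst) picks the first listed alternative that matches.
#   print((Literal("if") ^ Literal("iffy")).parseString("iffy"))   # -> ['iffy']
#   print((Literal("if") | Literal("iffy")).parseString("iffy"))   # -> ['if']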
class Each(ParseExpression):
"""Requires all given C{ParseExpression}s to be found, but in any order.
Expressions may be separated by whitespace.
May be constructed using the C{'&'} operator.
"""
def __init__( self, exprs, savelist = True ):
super(Each,self).__init__(exprs, savelist)
self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
self.skipWhitespace = True
self.initExprGroups = True
def parseImpl( self, instring, loc, doActions=True ):
if self.initExprGroups:
self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
self.optionals = opt1 + opt2
self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
self.required += self.multirequired
self.initExprGroups = False
tmpLoc = loc
tmpReqd = self.required[:]
tmpOpt = self.optionals[:]
matchOrder = []
keepMatching = True
while keepMatching:
tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
failed = []
for e in tmpExprs:
try:
# advance tmpLoc so that later expressions are tried after this match
tmpLoc = e.tryParse( instring, tmpLoc )
except ParseException:
failed.append(e)
else:
matchOrder.append(self.opt1map.get(id(e),e))
if e in tmpReqd:
tmpReqd.remove(e)
elif e in tmpOpt:
tmpOpt.remove(e)
if len(failed) == len(tmpExprs):
keepMatching = False
if tmpReqd:
missing = ", ".join(_ustr(e) for e in tmpReqd)
raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
# add any unmatched Optionals, in case they have default values defined
matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
resultlist = []
for e in matchOrder:
loc,results = e._parse(instring,loc,doActions)
resultlist.append(results)
finalResults = ParseResults()
for r in resultlist:
dups = {}
for k in r.keys():
if k in finalResults:
tmp = ParseResults(finalResults[k])
tmp += ParseResults(r[k])
dups[k] = tmp
finalResults += ParseResults(r)
for k,v in dups.items():
finalResults[k] = v
return loc, finalResults
def __str__( self ):
if hasattr(self,"name"):
return self.name
if self.strRepr is None:
self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
return self.strRepr
def checkRecursion( self, parseElementList ):
subRecCheckList = parseElementList[:] + [ self ]
for e in self.exprs:
e.checkRecursion( subRecCheckList )
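# Usage sketch (editor's illustration): '&' matches all expressions in any order.
#   shape = Word(alphas) & Word(nums)
#   print(shape.parseString("circle 7"))   # -> ['circle', '7']
#   print(shape.parseString("7 circle"))   # -> ['7', 'circle']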
class ParseElementEnhance(ParserElement):
"""Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens."""
def __init__( self, expr, savelist=False ):
super(ParseElementEnhance,self).__init__(savelist)
if isinstance( expr, basestring ):
expr = Literal(expr)
self.expr = expr
self.strRepr = None
if expr is not None:
self.mayIndexError = expr.mayIndexError
self.mayReturnEmpty = expr.mayReturnEmpty
self.setWhitespaceChars( expr.whiteChars )
self.skipWhitespace = expr.skipWhitespace
self.saveAsList = expr.saveAsList
self.callPreparse = expr.callPreparse
self.ignoreExprs.extend(expr.ignoreExprs)
def parseImpl( self, instring, loc, doActions=True ):
if self.expr is not None:
return self.expr._parse( instring, loc, doActions, callPreParse=False )
else:
raise ParseException("",loc,self.errmsg,self)
def leaveWhitespace( self ):
self.skipWhitespace = False
self.expr = self.expr.copy()
if self.expr is not None:
self.expr.leaveWhitespace()
return self
def ignore( self, other ):
if isinstance( other, Suppress ):
if other not in self.ignoreExprs:
super( ParseElementEnhance, self).ignore( other )
if self.expr is not None:
self.expr.ignore( self.ignoreExprs[-1] )
else:
super( ParseElementEnhance, self).ignore( other )
if self.expr is not None:
self.expr.ignore( self.ignoreExprs[-1] )
return self
def streamline( self ):
super(ParseElementEnhance,self).streamline()
if self.expr is not None:
self.expr.streamline()
return self
def checkRecursion( self, parseElementList ):
if self in parseElementList:
raise RecursiveGrammarException( parseElementList+[self] )
subRecCheckList = parseElementList[:] + [ self ]
if self.expr is not None:
self.expr.checkRecursion( subRecCheckList )
def validate( self, validateTrace=[] ):
tmp = validateTrace[:]+[self]
if self.expr is not None:
self.expr.validate(tmp)
self.checkRecursion( [] )
def __str__( self ):
try:
return super(ParseElementEnhance,self).__str__()
except Exception:
pass
if self.strRepr is None and self.expr is not None:
self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
return self.strRepr
class FollowedBy(ParseElementEnhance):
"""Lookahead matching of the given parse expression. C{FollowedBy}
does *not* advance the parsing position within the input string, it only
verifies that the specified parse expression matches at the current
position. C{FollowedBy} always returns a null token list."""
def __init__( self, expr ):
super(FollowedBy,self).__init__(expr)
self.mayReturnEmpty = True
def parseImpl( self, instring, loc, doActions=True ):
self.expr.tryParse( instring, loc )
return loc, []
class NotAny(ParseElementEnhance):
"""Lookahead to disallow matching with the given parse expression. C{NotAny}
does *not* advance the parsing position within the input string, it only
verifies that the specified parse expression does *not* match at the current
position. Also, C{NotAny} does *not* skip over leading whitespace. C{NotAny}
always returns a null token list. May be constructed using the C{'~'} operator."""
def __init__( self, expr ):
super(NotAny,self).__init__(expr)
#~ self.leaveWhitespace()
self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
self.mayReturnEmpty = True
self.errmsg = "Found unwanted token, "+_ustr(self.expr)
def parseImpl( self, instring, loc, doActions=True ):
if self.expr.canParseNext(instring, loc):
raise ParseException(instring, loc, self.errmsg, self)
return loc, []
def __str__( self ):
if hasattr(self,"name"):
return self.name
if self.strRepr is None:
self.strRepr = "~{" + _ustr(self.expr) + "}"
return self.strRepr
class OneOrMore(ParseElementEnhance):
"""Repetition of one or more of the given expression.
Parameters:
- expr - expression that must match one or more times
- stopOn - (default=None) - expression for a terminating sentinel
(only required if the sentinel would ordinarily match the repetition
expression)
"""
def __init__( self, expr, stopOn=None):
super(OneOrMore, self).__init__(expr)
ender = stopOn
if isinstance(ender, basestring):
ender = Literal(ender)
self.not_ender = ~ender if ender is not None else None
def parseImpl( self, instring, loc, doActions=True ):
self_expr_parse = self.expr._parse
self_skip_ignorables = self._skipIgnorables
check_ender = self.not_ender is not None
if check_ender:
try_not_ender = self.not_ender.tryParse
# must be at least one (but first see if we are the stopOn sentinel;
# if so, fail)
if check_ender:
try_not_ender(instring, loc)
loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
try:
hasIgnoreExprs = bool(self.ignoreExprs)
while 1:
if check_ender:
try_not_ender(instring, loc)
if hasIgnoreExprs:
preloc = self_skip_ignorables( instring, loc )
else:
preloc = loc
loc, tmptokens = self_expr_parse( instring, preloc, doActions )
if tmptokens or tmptokens.haskeys():
tokens += tmptokens
except (ParseException,IndexError):
pass
return loc, tokens
def __str__( self ):
if hasattr(self,"name"):
return self.name
if self.strRepr is None:
self.strRepr = "{" + _ustr(self.expr) + "}..."
return self.strRepr
def setResultsName( self, name, listAllMatches=False ):
ret = super(OneOrMore,self).setResultsName(name,listAllMatches)
ret.saveAsList = True
return ret
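# Usage sketch (editor's illustration): stopOn halts repetition at a sentinel.
#   body = OneOrMore(Word(alphas), stopOn=Literal("end"))
#   print(body.parseString("ab cd ef end"))   # -> ['ab', 'cd', 'ef']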
class ZeroOrMore(OneOrMore):
"""Optional repetition of zero or more of the given expression.
Parameters:
- expr - expression that must match zero or more times
- stopOn - (default=None) - expression for a terminating sentinel
(only required if the sentinel would ordinarily match the repetition
expression)
"""
def __init__( self, expr, stopOn=None):
super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
self.mayReturnEmpty = True
def parseImpl( self, instring, loc, doActions=True ):
try:
return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
except (ParseException,IndexError):
return loc, []
def __str__( self ):
if hasattr(self,"name"):
return self.name
if self.strRepr is None:
self.strRepr = "[" + _ustr(self.expr) + "]..."
return self.strRepr
class _NullToken(object):
def __bool__(self):
return False
__nonzero__ = __bool__
def __str__(self):
return ""
_optionalNotMatched = _NullToken()
class Optional(ParseElementEnhance):
"""Optional matching of the given expression.
Parameters:
- expr - expression to be matched, at most once
- default (optional) - value to be returned if the optional expression
is not found.
"""
def __init__( self, expr, default=_optionalNotMatched ):
super(Optional,self).__init__( expr, savelist=False )
self.defaultValue = default
self.mayReturnEmpty = True
def parseImpl( self, instring, loc, doActions=True ):
try:
loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
except (ParseException,IndexError):
if self.defaultValue is not _optionalNotMatched:
if self.expr.resultsName:
tokens = ParseResults([ self.defaultValue ])
tokens[self.expr.resultsName] = self.defaultValue
else:
tokens = [ self.defaultValue ]
else:
tokens = []
return loc, tokens
def __str__( self ):
if hasattr(self,"name"):
return self.name
if self.strRepr is None:
self.strRepr = "[" + _ustr(self.expr) + "]"
return self.strRepr
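# Usage sketch (editor's illustration): a default value is returned when the
# optional expression is absent.
#   numbered = Word(alphas) + Optional(Word(nums), default="0")
#   print(numbered.parseString("abc"))   # -> ['abc', '0']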
class SkipTo(ParseElementEnhance):
"""Token for skipping over all undefined text until the matched expression is found.
Parameters:
- expr - target expression marking the end of the data to be skipped
- include - (default=False) if True, the target expression is also parsed
(the skipped text and target expression are returned as a 2-element list).
- ignore - (default=None) used to define grammars (typically quoted strings and
comments) that might contain false matches to the target expression
- failOn - (default=None) define expressions that are not allowed to be
included in the skipped text; if found before the target expression is found,
the SkipTo is not a match
"""
def __init__( self, other, include=False, ignore=None, failOn=None ):
super( SkipTo, self ).__init__( other )
self.ignoreExpr = ignore
self.mayReturnEmpty = True
self.mayIndexError = False
self.includeMatch = include
self.asList = False
if failOn is not None and isinstance(failOn, basestring):
self.failOn = Literal(failOn)
else:
self.failOn = failOn
self.errmsg = "No match found for "+_ustr(self.expr)
def parseImpl( self, instring, loc, doActions=True ):
startloc = loc
instrlen = len(instring)
expr = self.expr
expr_parse = self.expr._parse
self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
tmploc = loc
while tmploc <= instrlen:
if self_failOn_canParseNext is not None:
# break if failOn expression matches
if self_failOn_canParseNext(instring, tmploc):
break
if self_ignoreExpr_tryParse is not None:
# advance past ignore expressions
while 1:
try:
tmploc = self_ignoreExpr_tryParse(instring, tmploc)
except ParseBaseException:
break
try:
expr_parse(instring, tmploc, doActions=False, callPreParse=False)
except (ParseException, IndexError):
# no match, advance loc in string
tmploc += 1
else:
# matched skipto expr, done
break
else:
# ran off the end of the input string without matching skipto expr, fail
raise ParseException(instring, loc, self.errmsg, self)
# build up return values
loc = tmploc
skiptext = instring[startloc:loc]
skipresult = ParseResults(skiptext)
if self.includeMatch:
loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
skipresult += mat
return loc, skipresult
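# Usage sketch (editor's illustration): skip to a terminator, optionally consuming it.
#   stmt = SkipTo(";")
#   print(stmt.parseString("a = b + c;"))                  # -> ['a = b + c']
#   print(SkipTo(";", include=True).parseString("x = 1;")) # -> ['x = 1', ';']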
class Forward(ParseElementEnhance):
"""Forward declaration of an expression to be defined later -
used for recursive grammars, such as algebraic infix notation.
When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
Note: take care when assigning to C{Forward} not to overlook precedence of operators.
Specifically, '|' has a lower precedence than '<<', so that::
fwdExpr << a | b | c
will actually be evaluated as::
(fwdExpr << a) | b | c
thereby leaving b and c out as parseable alternatives. It is recommended that you
explicitly group the values inserted into the C{Forward}::
fwdExpr << (a | b | c)
Converting to use the '<<=' operator instead will avoid this problem.
"""
def __init__( self, other=None ):
super(Forward,self).__init__( other, savelist=False )
def __lshift__( self, other ):
if isinstance( other, basestring ):
other = ParserElement.literalStringClass(other)
self.expr = other
self.strRepr = None
self.mayIndexError = self.expr.mayIndexError
self.mayReturnEmpty = self.expr.mayReturnEmpty
self.setWhitespaceChars( self.expr.whiteChars )
self.skipWhitespace = self.expr.skipWhitespace
self.saveAsList = self.expr.saveAsList
self.ignoreExprs.extend(self.expr.ignoreExprs)
return self
def __ilshift__(self, other):
return self << other
def leaveWhitespace( self ):
self.skipWhitespace = False
return self
def streamline( self ):
if not self.streamlined:
self.streamlined = True
if self.expr is not None:
self.expr.streamline()
return self
def validate( self, validateTrace=[] ):
if self not in validateTrace:
tmp = validateTrace[:]+[self]
if self.expr is not None:
self.expr.validate(tmp)
self.checkRecursion([])
def __str__( self ):
if hasattr(self,"name"):
return self.name
return self.__class__.__name__ + ": ..."
# stubbed out for now - creates awful memory and perf issues
self._revertClass = self.__class__
self.__class__ = _ForwardNoRecurse
try:
if self.expr is not None:
retString = _ustr(self.expr)
else:
retString = "None"
finally:
self.__class__ = self._revertClass
return self.__class__.__name__ + ": " + retString
def copy(self):
if self.expr is not None:
return super(Forward,self).copy()
else:
ret = Forward()
ret <<= self
return ret
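# Usage sketch (editor's illustration): recursive grammar for nested parentheses.
#   nested = Forward()
#   nested <<= "(" + Optional(nested) + ")"
#   print(nested.parseString("(())"))   # -> ['(', '(', ')', ')']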
class _ForwardNoRecurse(Forward):
def __str__( self ):
return "..."
class TokenConverter(ParseElementEnhance):
"""Abstract subclass of C{ParseExpression}, for converting parsed results."""
def __init__( self, expr, savelist=False ):
super(TokenConverter,self).__init__( expr )#, savelist )
self.saveAsList = False
class Combine(TokenConverter):
"""Converter to concatenate all matching tokens to a single string.
By default, the matching patterns must also be contiguous in the input string;
this can be disabled by specifying C{'adjacent=False'} in the constructor.
"""
def __init__( self, expr, joinString="", adjacent=True ):
super(Combine,self).__init__( expr )
# suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
if adjacent:
self.leaveWhitespace()
self.adjacent = adjacent
self.skipWhitespace = True
self.joinString = joinString
self.callPreparse = True
def ignore( self, other ):
if self.adjacent:
ParserElement.ignore(self, other)
else:
super( Combine, self).ignore( other )
return self
def postParse( self, instring, loc, tokenlist ):
retToks = tokenlist.copy()
del retToks[:]
retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
if self.resultsName and retToks.haskeys():
return [ retToks ]
else:
return retToks
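# Usage sketch (editor's illustration): Combine joins adjacent tokens into one.
#   real = Combine(Word(nums) + "." + Word(nums))
#   print(real.parseString("3.1416"))   # -> ['3.1416']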
class Group(TokenConverter):
"""Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions."""
def __init__( self, expr ):
super(Group,self).__init__( expr )
self.saveAsList = True
def postParse( self, instring, loc, tokenlist ):
return [ tokenlist ]
class Dict(TokenConverter):
"""Converter to return a repetitive expression as a list, but also as a dictionary.
Each element can also be referenced using the first token in the expression as its key.
Useful for tabular report scraping when the first column can be used as an item key.
"""
def __init__( self, expr ):
super(Dict,self).__init__( expr )
self.saveAsList = True
def postParse( self, instring, loc, tokenlist ):
for i,tok in enumerate(tokenlist):
if len(tok) == 0:
continue
ikey = tok[0]
if isinstance(ikey,int):
ikey = _ustr(tok[0]).strip()
if len(tok)==1:
tokenlist[ikey] = _ParseResultsWithOffset("",i)
elif len(tok)==2 and not isinstance(tok[1],ParseResults):
tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
else:
dictvalue = tok.copy() #ParseResults(i)
del dictvalue[0]
if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
else:
tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
if self.resultsName:
return [ tokenlist ]
else:
return tokenlist
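# Usage sketch (editor's illustration): Dict keys results by each group's first token.
#   entry = Group(Word(alphas) + Suppress("=") + Word(nums))
#   table = Dict(OneOrMore(entry))
#   print(table.parseString("a=1 b=2")["b"])   # -> 2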
class Suppress(TokenConverter):
"""Converter for ignoring the results of a parsed expression."""
def postParse( self, instring, loc, tokenlist ):
return []
def suppress( self ):
return self
class OnlyOnce(object):
"""Wrapper for parse actions, to ensure they are only called once."""
def __init__(self, methodCall):
self.callable = _trim_arity(methodCall)
self.called = False
def __call__(self,s,l,t):
if not self.called:
results = self.callable(s,l,t)
self.called = True
return results
raise ParseException(s,l,"")
def reset(self):
self.called = False
def traceParseAction(f):
"""Decorator for debugging parse actions."""
f = _trim_arity(f)
def z(*paArgs):
thisFunc = f.__name__
s,l,t = paArgs[-3:]
if len(paArgs)>3:
thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
sys.stderr.write( ">>entering %s(line: '%s', %d, %s)\n" % (thisFunc,line(l,s),l,t) )
try:
ret = f(*paArgs)
except Exception as exc:
sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
raise
sys.stderr.write( "<<leaving %s (ret: %s)\n" % (thisFunc,ret) )
return ret
try:
z.__name__ = f.__name__
except AttributeError:
pass
return z
#
# global helpers
#
def delimitedList( expr, delim=",", combine=False ):
"""Helper to define a delimited list of expressions - the delimiter defaults to ','.
By default, the list elements and delimiters can have intervening whitespace and
comments; this can be overridden by passing C{combine=True} in the constructor.
If C{combine} is set to C{True}, the matching tokens are returned as a single token
string, with the delimiters included; otherwise, the matching tokens are returned
as a list of tokens, with the delimiters suppressed.
"""
dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
if combine:
return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
else:
return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
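# Usage sketch (editor's illustration):
#   print(delimitedList(Word(alphas)).parseString("aa, bb ,cc"))   # -> ['aa', 'bb', 'cc']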
def countedArray( expr, intExpr=None ):
"""Helper to define a counted list of expressions.
This helper defines a pattern of the form::
integer expr expr expr...
where the leading integer tells how many expr expressions follow.
Parsing returns the array of expr tokens as a list - the leading count token is suppressed.
"""
arrayExpr = Forward()
def countFieldParseAction(s,l,t):
n = t[0]
arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
return []
if intExpr is None:
intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
else:
intExpr = intExpr.copy()
intExpr.setName("arrayLen")
intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
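# Usage sketch (editor's illustration): the leading count token is suppressed,
# and the counted elements are returned grouped together.
#   data = countedArray(Word(alphas))
#   data.parseString("3 ab cd ef")   # yields the three words 'ab', 'cd', 'ef'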
def _flatten(L):
ret = []
for i in L:
if isinstance(i,list):
ret.extend(_flatten(i))
else:
ret.append(i)
return ret
def matchPreviousLiteral(expr):
"""Helper to define an expression that is indirectly defined from
the tokens matched in a previous expression, that is, it looks
for a 'repeat' of a previous expression. For example::
first = Word(nums)
second = matchPreviousLiteral(first)
matchExpr = first + ":" + second
will match C{"1:1"}, but not C{"1:2"}. Because this matches a
previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
If this is not desired, use C{matchPreviousExpr}.
Do *not* use with packrat parsing enabled.
"""
rep = Forward()
def copyTokenToRepeater(s,l,t):
if t:
if len(t) == 1:
rep << t[0]
else:
# flatten t tokens
tflat = _flatten(t.asList())
rep << And(Literal(tt) for tt in tflat)
else:
rep << Empty()
expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
rep.setName('(prev) ' + _ustr(expr))
return rep
def matchPreviousExpr(expr):
"""Helper to define an expression that is indirectly defined from
the tokens matched in a previous expression, that is, it looks
for a 'repeat' of a previous expression. For example::
first = Word(nums)
second = matchPreviousExpr(first)
matchExpr = first + ":" + second
will match C{"1:1"}, but not C{"1:2"}. Because this matches by
expressions, will *not* match the leading C{"1:1"} in C{"1:10"};
the expressions are evaluated first, and then compared, so
C{"1"} is compared with C{"10"}.
Do *not* use with packrat parsing enabled.
"""
rep = Forward()
e2 = expr.copy()
rep <<= e2
def copyTokenToRepeater(s,l,t):
matchTokens = _flatten(t.asList())
def mustMatchTheseTokens(s,l,t):
theseTokens = _flatten(t.asList())
if theseTokens != matchTokens:
raise ParseException("",0,"")
rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
rep.setName('(prev) ' + _ustr(expr))
return rep
def _escapeRegexRangeChars(s):
#~ escape these chars: ^-]
for c in r"\^-]":
s = s.replace(c,_bslash+c)
s = s.replace("\n",r"\n")
s = s.replace("\t",r"\t")
return _ustr(s)
def oneOf( strs, caseless=False, useRegex=True ):
"""Helper to quickly define a set of alternative Literals, and makes sure to do
longest-first testing when there is a conflict, regardless of the input order,
but returns a C{L{MatchFirst}} for best performance.
Parameters:
- strs - a string of space-delimited literals, or a list of string literals
- caseless - (default=False) - treat all literals as caseless
- useRegex - (default=True) - as an optimization, will generate a Regex
object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
if creating a C{Regex} raises an exception)
"""
if caseless:
isequal = ( lambda a,b: a.upper() == b.upper() )
masks = ( lambda a,b: b.upper().startswith(a.upper()) )
parseElementClass = CaselessLiteral
else:
isequal = ( lambda a,b: a == b )
masks = ( lambda a,b: b.startswith(a) )
parseElementClass = Literal
symbols = []
if isinstance(strs,basestring):
symbols = strs.split()
elif isinstance(strs, collections.Sequence):
symbols = list(strs[:])
elif isinstance(strs, _generatorType):
symbols = list(strs)
else:
warnings.warn("Invalid argument to oneOf, expected string or list",
SyntaxWarning, stacklevel=2)
if not symbols:
return NoMatch()
i = 0
while i < len(symbols)-1:
cur = symbols[i]
for j,other in enumerate(symbols[i+1:]):
if ( isequal(other, cur) ):
del symbols[i+j+1]
break
elif ( masks(cur, other) ):
del symbols[i+j+1]
symbols.insert(i,other)
cur = other
break
else:
i += 1
if not caseless and useRegex:
#~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
try:
if len(symbols)==len("".join(symbols)):
return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
else:
return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
except Exception:
warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
SyntaxWarning, stacklevel=2)
# last resort, just use MatchFirst
return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
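# Usage sketch (editor's illustration): longest-first ordering avoids masking.
#   comparison = oneOf("< > <= >= == !=")
#   print(comparison.parseString("<="))   # -> ['<='], not ['<']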
def dictOf( key, value ):
"""Helper to easily and clearly define a dictionary by specifying the respective patterns
for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
in the proper order. The key pattern can include delimiting markers or punctuation,
as long as they are suppressed, thereby leaving the significant key text. The value
pattern can include named results, so that the C{Dict} results can include named token
fields.
"""
return Dict( ZeroOrMore( Group ( key + value ) ) )
def originalTextFor(expr, asString=True):
"""Helper to return the original, untokenized text for a given expression. Useful to
restore the parsed fields of an HTML start tag into the raw tag text itself, or to
revert separate tokens with intervening whitespace back to the original matching
input text. By default, returns a string containing the original parsed text.
If the optional C{asString} argument is passed as C{False}, then the return value is a
C{L{ParseResults}} containing any results names that were originally matched, and a
single token containing the original matched text from the input string. So if
the expression passed to C{L{originalTextFor}} contains expressions with defined
results names, you must set C{asString} to C{False} if you want to preserve those
results name values."""
locMarker = Empty().setParseAction(lambda s,loc,t: loc)
endlocMarker = locMarker.copy()
endlocMarker.callPreparse = False
matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
if asString:
extractText = lambda s,l,t: s[t._original_start:t._original_end]
else:
def extractText(s,l,t):
del t[:]
t.insert(0, s[t._original_start:t._original_end])
del t["_original_start"]
del t["_original_end"]
matchExpr.setParseAction(extractText)
return matchExpr
def ungroup(expr):
"""Helper to undo pyparsing's default grouping of And expressions, even
if all but one are non-empty."""
return TokenConverter(expr).setParseAction(lambda t:t[0])
def locatedExpr(expr):
"""Helper to decorate a returned token with its starting and ending locations in the input string.
This helper adds the following results names:
- locn_start = location where matched expression begins
- locn_end = location where matched expression ends
- value = the actual parsed results
Be careful if the input text contains C{<TAB>} characters; you may want to call
C{L{ParserElement.parseWithTabs}}
"""
locator = Empty().setParseAction(lambda s,l,t: l)
return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
# convenience constants for positional expressions
empty = Empty().setName("empty")
lineStart = LineStart().setName("lineStart")
lineEnd = LineEnd().setName("lineEnd")
stringStart = StringStart().setName("stringStart")
stringEnd = StringEnd().setName("stringEnd")
_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
_charRange = Group(_singleChar + Suppress("-") + _singleChar)
_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
def srange(s):
r"""Helper to easily define string ranges for use in Word construction. Borrows
syntax from regexp '[]' string range definitions::
srange("[0-9]") -> "0123456789"
srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"
srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
The input string must be enclosed in []'s, and the returned string is the expanded
character set joined into a single string.
The values enclosed in the []'s may be::
a single character
an escaped character with a leading backslash (such as \- or \])
an escaped hex character with a leading '\x' (\x21, which is a '!' character)
(\0x## is also supported for backwards compatibility)
an escaped octal character with a leading '\0' (\041, which is a '!' character)
a range of any of the above, separated by a dash ('a-z', etc.)
any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.)
"""
_expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
try:
return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
except Exception:
return ""
def matchOnlyAtCol(n):
"""Helper method for defining parse actions that require matching at a specific
column in the input text.
"""
def verifyCol(strg,locn,toks):
if col(locn,strg) != n:
raise ParseException(strg,locn,"matched token not at column %d" % n)
return verifyCol
def replaceWith(replStr):
"""Helper method for common parse actions that simply return a literal value. Especially
useful when used with C{L{transformString<ParserElement.transformString>}()}.
"""
#def _replFunc(*args):
# return [replStr]
#return _replFunc
return functools.partial(next, itertools.repeat([replStr]))
def removeQuotes(s,l,t):
"""Helper parse action for removing quotation marks from parsed quoted strings.
To use, add this parse action to quoted string using::
quotedString.setParseAction( removeQuotes )
"""
return t[0][1:-1]
def upcaseTokens(s,l,t):
"""Helper parse action to convert tokens to upper case."""
return [ tt.upper() for tt in map(_ustr,t) ]
def downcaseTokens(s,l,t):
"""Helper parse action to convert tokens to lower case."""
return [ tt.lower() for tt in map(_ustr,t) ]
def getTokensEndLoc():
"""Method to be called from within a parse action to determine the end
location of the parsed tokens."""
import inspect
fstack = inspect.stack()
try:
# search up the stack (through intervening argument normalizers) for correct calling routine
for f in fstack[2:]:
if f[3] == "_parseNoCache":
endloc = f[0].f_locals["loc"]
return endloc
else:
raise ParseFatalException("incorrect usage of getTokensEndLoc - may only be called from within a parse action")
finally:
del fstack
def _makeTags(tagStr, xml):
"""Internal helper to construct opening and closing tag expressions, given a tag name"""
if isinstance(tagStr,basestring):
resname = tagStr
tagStr = Keyword(tagStr, caseless=not xml)
else:
resname = tagStr.name
tagAttrName = Word(alphas,alphanums+"_-:")
if (xml):
tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
openTag = Suppress("<") + tagStr("tag") + \
Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
else:
printablesLessRAbrack = "".join(c for c in printables if c not in ">")
tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
openTag = Suppress("<") + tagStr("tag") + \
Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
Optional( Suppress("=") + tagAttrValue ) ))) + \
Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
closeTag = Combine(_L("</") + tagStr + ">")
openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
openTag.tag = resname
closeTag.tag = resname
return openTag, closeTag
def makeHTMLTags(tagStr):
"""Helper to construct opening and closing tag expressions for HTML, given a tag name"""
return _makeTags( tagStr, False )
def makeXMLTags(tagStr):
"""Helper to construct opening and closing tag expressions for XML, given a tag name"""
return _makeTags( tagStr, True )
def withAttribute(*args,**attrDict):
"""Helper to create a validating parse action to be used with start tags created
with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
with a required attribute value, to avoid false matches on common tags such as
C{<TD>} or C{<DIV>}.
Call C{withAttribute} with a series of attribute names and values. Specify the list
of filter attribute names and values as:
- keyword arguments, as in C{(align="right")}, or
- as an explicit dict with C{**} operator, when an attribute name is also a Python
reserved word, as in C{**{"class":"Customer", "align":"right"}}
- a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
For attribute names with a namespace prefix, you must use the second form. Attribute
names are matched case-insensitively.
If just testing for C{class} (with or without a namespace), use C{L{withClass}}.
To verify that the attribute exists, but without specifying a value, pass
C{withAttribute.ANY_VALUE} as the value.
"""
if args:
attrs = args[:]
else:
attrs = attrDict.items()
attrs = [(k,v) for k,v in attrs]
def pa(s,l,tokens):
for attrName,attrValue in attrs:
if attrName not in tokens:
raise ParseException(s,l,"no matching attribute " + attrName)
if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
(attrName, tokens[attrName], attrValue))
return pa
withAttribute.ANY_VALUE = object()
def withClass(classname, namespace=''):
"""Simplified version of C{L{withAttribute}} when matching on a div class - made
difficult because C{class} is a reserved word in Python.
"""
classattr = "%s:class" % namespace if namespace else "class"
return withAttribute(**{classattr : classname})
opAssoc = _Constants()
opAssoc.LEFT = object()
opAssoc.RIGHT = object()
def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
"""Helper method for constructing grammars of expressions made up of
operators working in a precedence hierarchy. Operators may be unary or
binary, left- or right-associative. Parse actions can also be attached
to operator expressions.
Parameters:
- baseExpr - expression representing the most basic element for the nested
- opList - list of tuples, one for each operator precedence level in the
expression grammar; each tuple is of the form
(opExpr, numTerms, rightLeftAssoc, parseAction), where:
- opExpr is the pyparsing expression for the operator;
may also be a string, which will be converted to a Literal;
if numTerms is 3, opExpr is a tuple of two expressions, for the
two operators separating the 3 terms
- numTerms is the number of terms for this operator (must
be 1, 2, or 3)
- rightLeftAssoc is the indicator whether the operator is
right or left associative, using the pyparsing-defined
constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
- parseAction is the parse action to be associated with
expressions matching this operator expression (the
parse action tuple member may be omitted)
- lpar - expression for matching left-parentheses (default=Suppress('('))
- rpar - expression for matching right-parentheses (default=Suppress(')'))
"""
ret = Forward()
lastExpr = baseExpr | ( lpar + ret + rpar )
for i,operDef in enumerate(opList):
opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr
if arity == 3:
if opExpr is None or len(opExpr) != 2:
raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")
opExpr1, opExpr2 = opExpr
thisExpr = Forward().setName(termName)
if rightLeftAssoc == opAssoc.LEFT:
if arity == 1:
matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )
elif arity == 2:
if opExpr is not None:
matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )
else:
matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )
elif arity == 3:
matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \
Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )
else:
raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
elif rightLeftAssoc == opAssoc.RIGHT:
if arity == 1:
# try to avoid LR with this extra test
if not isinstance(opExpr, Optional):
opExpr = Optional(opExpr)
matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )
elif arity == 2:
if opExpr is not None:
matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )
else:
matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )
elif arity == 3:
matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \
Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )
else:
raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
else:
raise ValueError("operator must indicate right or left associativity")
if pa:
matchExpr.setParseAction( pa )
thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
lastExpr = thisExpr
ret <<= lastExpr
return ret
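# Usage sketch (editor's illustration): a small arithmetic grammar; each
# precedence level is returned as a nested group.
#   integer = Word(nums).setParseAction(lambda t: int(t[0]))
#   arith = infixNotation(integer, [
#       (oneOf("* /"), 2, opAssoc.LEFT),
#       (oneOf("+ -"), 2, opAssoc.LEFT),
#   ])
#   print(arith.parseString("1+2*3"))   # -> [[1, '+', [2, '*', 3]]]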
operatorPrecedence = infixNotation
dblQuotedString = Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*"').setName("string enclosed in double quotes")
sglQuotedString = Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*'").setName("string enclosed in single quotes")
quotedString = Regex(r'''(?:"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*")|(?:'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*')''').setName("quotedString using single or double quotes")
unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")
def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
"""Helper method for defining nested lists enclosed in opening and closing
delimiters ("(" and ")" are the default).
Parameters:
- opener - opening character for a nested list (default="("); can also be a pyparsing expression
- closer - closing character for a nested list (default=")"); can also be a pyparsing expression
- content - expression for items within the nested lists (default=None)
- ignoreExpr - expression for ignoring opening and closing delimiters (default=quotedString)
If an expression is not provided for the content argument, the nested
expression will capture all whitespace-delimited content between delimiters
as a list of separate values.
Use the C{ignoreExpr} argument to define expressions that may contain
opening or closing characters that should not be treated as opening
or closing characters for nesting, such as quotedString or a comment
expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
The default is L{quotedString}, but if no expressions are to be ignored,
then pass C{None} for this argument.
"""
if opener == closer:
raise ValueError("opening and closing strings cannot be the same")
if content is None:
if isinstance(opener,basestring) and isinstance(closer,basestring):
if len(opener) == 1 and len(closer)==1:
if ignoreExpr is not None:
content = (Combine(OneOrMore(~ignoreExpr +
CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
).setParseAction(lambda t:t[0].strip()))
else:
content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS
).setParseAction(lambda t:t[0].strip()))
else:
if ignoreExpr is not None:
content = (Combine(OneOrMore(~ignoreExpr +
~Literal(opener) + ~Literal(closer) +
CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
).setParseAction(lambda t:t[0].strip()))
else:
content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +
CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
).setParseAction(lambda t:t[0].strip()))
else:
raise ValueError("opening and closing arguments must be strings if no content expression is given")
ret = Forward()
if ignoreExpr is not None:
ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
else:
ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )
ret.setName('nested %s%s expression' % (opener,closer))
return ret
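# Usage sketch (editor's illustration):
#   print(nestedExpr().parseString("(a (b c) d)"))   # -> [['a', ['b', 'c'], 'd']]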
def indentedBlock(blockStatementExpr, indentStack, indent=True):
"""Helper method for defining space-delimited indentation blocks, such as
those used to define block statements in Python source code.
Parameters:
- blockStatementExpr - expression defining syntax of statement that
is repeated within the indented block
- indentStack - list created by caller to manage indentation stack
(multiple statementWithIndentedBlock expressions within a single grammar
should share a common indentStack)
- indent - boolean indicating whether block must be indented beyond the
current level; set to False for a block of left-most statements
(default=True)
A valid block must contain at least one C{blockStatement}.
"""
def checkPeerIndent(s,l,t):
if l >= len(s): return
curCol = col(l,s)
if curCol != indentStack[-1]:
if curCol > indentStack[-1]:
raise ParseFatalException(s,l,"illegal nesting")
raise ParseException(s,l,"not a peer entry")
def checkSubIndent(s,l,t):
curCol = col(l,s)
if curCol > indentStack[-1]:
indentStack.append( curCol )
else:
raise ParseException(s,l,"not a subentry")
def checkUnindent(s,l,t):
if l >= len(s): return
curCol = col(l,s)
if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):
raise ParseException(s,l,"not an unindent")
indentStack.pop()
NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())
INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')
PEER = Empty().setParseAction(checkPeerIndent).setName('')
UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')
if indent:
smExpr = Group( Optional(NL) +
#~ FollowedBy(blockStatementExpr) +
INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)
else:
smExpr = Group( Optional(NL) +
(OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )
blockStatementExpr.ignore(_bslash + LineEnd())
return smExpr.setName('indented block')
alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")
anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))
_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))
commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")
def replaceHTMLEntity(t):
"""Helper parser action to replace common HTML entities with their special characters"""
return _htmlEntityMap.get(t.entity)
# it's easy to get these comment structures wrong - they're very common, so may as well make them available
cStyleComment = Regex(r"/\*(?:[^*]*\*+)+?/").setName("C style comment")
htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")
restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")
dblSlashComment = Regex(r"\/\/(\\\n|.)*").setName("// comment")
cppStyleComment = Regex(r"/(?:\*(?:[^*]*\*+)+?/|/[^\n]*(?:\n[^\n]*)*?(?:(?<!\\)|\Z))").setName("C++ style comment")
javaStyleComment = cppStyleComment
pythonStyleComment = Regex(r"#.*").setName("Python style comment")
_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +
Optional( Word(" \t") +
~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
if __name__ == "__main__":
selectToken = CaselessLiteral( "select" )
fromToken = CaselessLiteral( "from" )
ident = Word( alphas, alphanums + "_$" )
columnName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens )
columnNameList = Group( delimitedList( columnName ) ).setName("columns")
tableName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens )
tableNameList = Group( delimitedList( tableName ) ).setName("tables")
simpleSQL = ( selectToken + \
( '*' | columnNameList ).setResultsName( "columns" ) + \
fromToken + \
tableNameList.setResultsName( "tables" ) )
simpleSQL.runTests("""\
SELECT * from XYZZY, ABC
select * from SYS.XYZZY
Select A from Sys.dual
Select AA,BB,CC from Sys.dual
Select A, B, C from Sys.dual
Select A, B, C from Sys.dual
Xelect A, B, C from Sys.dual
Select A, B, C frox Sys.dual
Select
Select ^^^ frox Sys.dual
Select A, B, C from Sys.dual, Table2""")
|
andymckay/addons-server
|
refs/heads/master
|
src/olympia/accounts/tests/test_serializers.py
|
7
|
from olympia.amo.tests import BaseTestCase
from olympia.accounts.serializers import UserProfileSerializer
from olympia.users.models import UserProfile
class TestAccountSerializer(BaseTestCase):
def setUp(self):
self.user = UserProfile.objects.create(email='a@m.o')
def test_picture_url(self):
serial = UserProfileSerializer(instance=self.user)
assert 'anon_user.png' in serial.data['picture_url']
|
ErykB2000/home-assistant
|
refs/heads/master
|
tests/test_component_media_player.py
|
4
|
"""
tests.test_component_media_player
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests media_player component.
"""
# pylint: disable=too-many-public-methods,protected-access
import logging
import unittest
import homeassistant as ha
from homeassistant.const import (
STATE_OFF,
SERVICE_TURN_ON, SERVICE_TURN_OFF, SERVICE_VOLUME_UP, SERVICE_VOLUME_DOWN,
SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PREVIOUS_TRACK, ATTR_ENTITY_ID)
import homeassistant.components.media_player as media_player
from helpers import mock_service
def setUpModule(): # pylint: disable=invalid-name
""" Setup to ignore media_player errors. """
logging.disable(logging.CRITICAL)
class TestMediaPlayer(unittest.TestCase):
""" Test the media_player module. """
def setUp(self): # pylint: disable=invalid-name
self.hass = ha.HomeAssistant()
self.test_entity = media_player.ENTITY_ID_FORMAT.format('living_room')
self.hass.states.set(self.test_entity, STATE_OFF)
self.test_entity2 = media_player.ENTITY_ID_FORMAT.format('bedroom')
self.hass.states.set(self.test_entity2, "YouTube")
def tearDown(self): # pylint: disable=invalid-name
""" Stop down stuff we started. """
self.hass.stop()
def test_is_on(self):
""" Test is_on method. """
self.assertFalse(media_player.is_on(self.hass, self.test_entity))
self.assertTrue(media_player.is_on(self.hass, self.test_entity2))
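    # (editor's note) the assertions above rely on is_on treating any state
    # other than STATE_OFF (here the literal string "YouTube") as "on".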
def test_services(self):
"""
        Test that the call-service helper methods map to the correct service calls.
"""
services = {
SERVICE_TURN_ON: media_player.turn_on,
SERVICE_TURN_OFF: media_player.turn_off,
SERVICE_VOLUME_UP: media_player.volume_up,
SERVICE_VOLUME_DOWN: media_player.volume_down,
SERVICE_MEDIA_PLAY_PAUSE: media_player.media_play_pause,
SERVICE_MEDIA_PLAY: media_player.media_play,
SERVICE_MEDIA_PAUSE: media_player.media_pause,
SERVICE_MEDIA_NEXT_TRACK: media_player.media_next_track,
SERVICE_MEDIA_PREVIOUS_TRACK: media_player.media_previous_track
}
for service_name, service_method in services.items():
calls = mock_service(self.hass, media_player.DOMAIN, service_name)
service_method(self.hass)
self.hass.pool.block_till_done()
self.assertEqual(1, len(calls))
call = calls[-1]
self.assertEqual(media_player.DOMAIN, call.domain)
self.assertEqual(service_name, call.service)
service_method(self.hass, self.test_entity)
self.hass.pool.block_till_done()
self.assertEqual(2, len(calls))
call = calls[-1]
self.assertEqual(media_player.DOMAIN, call.domain)
self.assertEqual(service_name, call.service)
self.assertEqual(self.test_entity,
call.data.get(ATTR_ENTITY_ID))
|
emk/pyjamas
|
refs/heads/master
|
library/pyjamas/chart/TouchedPointUpdateOption.py
|
6
|
"""
* Copyright 2007,2008,2009 John C. Gunther
* Copyright (C) 2009 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http:#www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*
"""
"""*
* Defines how the <tt>update</tt> method updates the touched
 * point, that is, the point the user is considered to be
 * hovering over.
*
* @see #update(TouchedPointUpdateOption) update
*
"""
class TouchedPointUpdateOption(object):
def __init__(self):
pass
"""*
* When this option is passed to the update method, any
* touched point is cleared as a consequence of the update.
* <p>
*
* This option can be used when you want to "start fresh"
* with regards to hover feedback after an update, and want
* to assure that only explicit user-generated mouse move
* actions (rather than objects moving <i>underneath</i> a
* fixed-position mouse cursor) can trigger hover feedback.
*
* @see #update update
* @see #TOUCHED_POINT_LOCKED TOUCHED_POINT_LOCKED
* @see #TOUCHED_POINT_UPDATED TOUCHED_POINT_UPDATED
*
"""
TOUCHED_POINT_CLEARED = TouchedPointUpdateOption()
"""*
* When this option is passed to the update method, any
* previously touched point is locked in (remains unchanged).
* <p>
*
* For example, if the mouse is over a certain point before
* the update, and that point moves away from the mouse
* (without the mouse moving otherwise) as a consequence of
* the update, the hover feedback remains "locked in" to the
* original point, even though the mouse is no longer on top
* of that point.
* <p>
*
* This option is useful for hover widgets that modify the
 * position, size, or symbol of points/curves and do not want the
* selected point/curve (and popup hover widget) to change as
* a consequence of such changes.
* <p>
*
* <i>Note:</i> If the currently touched point or the curve
* containing it is deleted, GChart sets the touched point
* reference to <tt>None</tt>. In that case, this option and
* <tt>TOUCHED_POINT_CLEARED</tt> behave the same way.
*
*
* @see #update update
* @see #TOUCHED_POINT_CLEARED TOUCHED_POINT_CLEARED
* @see #TOUCHED_POINT_UPDATED TOUCHED_POINT_UPDATED
*
"""
TOUCHED_POINT_LOCKED = TouchedPointUpdateOption()
"""*
* When this option is passed to the update method, the
* touched point is updated so that it reflects whatever point
* is underneath the mouse cursor after the update
* completes.
* <p>
*
* For example, if the mouse is not hovering over any point
* before the update, but the update repositions one of the
* points so that it is now underneath the mouse cursor,
* the hover feedback for that point will be displayed.
* Similarly, if the update moves a point away from the
* mouse cursor, previously displayed hover feedback will
* be eliminated.
* <p>
*
* @see #update update
* @see #TOUCHED_POINT_CLEARED TOUCHED_POINT_CLEARED
* @see #TOUCHED_POINT_LOCKED TOUCHED_POINT_LOCKED
*
"""
TOUCHED_POINT_UPDATED = TouchedPointUpdateOption()
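# A hypothetical usage sketch (editor's addition; "chart" and its update method
# belong to the surrounding GChart library and are not defined in this module):
#
#   chart.update(TouchedPointUpdateOption.TOUCHED_POINT_LOCKED)
#
# keeps hover feedback pinned to the previously touched point, while
# TOUCHED_POINT_UPDATED re-resolves whichever point ends up under the cursor.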
|
sklnet/opendroid-enigma2
|
refs/heads/master
|
lib/python/Screens/ScanSetup.py
|
2
|
from Screen import Screen
from ServiceScan import ServiceScan
from Components.config import config, ConfigSubsection, ConfigSelection, ConfigYesNo, ConfigInteger, getConfigListEntry, ConfigSlider, ConfigEnableDisable
from Components.ActionMap import NumberActionMap, ActionMap
from Components.ConfigList import ConfigListScreen
from Components.NimManager import nimmanager, getConfigSatlist
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Tools.HardwareInfo import HardwareInfo
from Screens.InfoBar import InfoBar
from Screens.MessageBox import MessageBox
from enigma import eTimer, eDVBFrontendParametersSatellite, eComponentScan, eDVBFrontendParametersTerrestrial, eDVBFrontendParametersCable, eConsoleAppContainer, eDVBResourceManager
from Components.Converter.ChannelNumbers import channelnumbers
def buildTerTransponder(frequency,
inversion=2, bandwidth = 7000000, fechigh = 6, feclow = 6,
modulation = 2, transmission = 2, guard = 4,
hierarchy = 4, system = 0, plpid = 0):
# print "freq", frequency, "inv", inversion, "bw", bandwidth, "fech", fechigh, "fecl", feclow, "mod", modulation, "tm", transmission, "guard", guard, "hierarchy", hierarchy
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = frequency
parm.inversion = inversion
parm.bandwidth = bandwidth
parm.code_rate_HP = fechigh
parm.code_rate_LP = feclow
parm.modulation = modulation
parm.transmission_mode = transmission
parm.guard_interval = guard
parm.hierarchy = hierarchy
parm.system = system
parm.plpid = plpid
return parm
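# Example (editor's sketch): a plain DVB-T transponder at 474 MHz (frequency is
# given in Hz) with 8 MHz bandwidth, all other parameters left at their defaults:
#
#   parm = buildTerTransponder(474000000, bandwidth = 8000000)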
def getInitialTransponderList(tlist, pos):
list = nimmanager.getTransponders(pos)
for x in list:
if x[0] == 0: #SAT
parm = eDVBFrontendParametersSatellite()
parm.frequency = x[1]
parm.symbol_rate = x[2]
parm.polarisation = x[3]
parm.fec = x[4]
parm.inversion = x[7]
parm.orbital_position = pos
parm.system = x[5]
parm.modulation = x[6]
parm.rolloff = x[8]
parm.pilot = x[9]
tlist.append(parm)
def getInitialCableTransponderList(tlist, nim):
list = nimmanager.getTranspondersCable(nim)
for x in list:
if x[0] == 1: #CABLE
parm = eDVBFrontendParametersCable()
parm.frequency = x[1]
parm.symbol_rate = x[2]
parm.modulation = x[3]
parm.fec_inner = x[4]
parm.inversion = x[5]
parm.system = x[6]
tlist.append(parm)
def getInitialTerrestrialTransponderList(tlist, region):
list = nimmanager.getTranspondersTerrestrial(region)
#self.transponders[self.parsedTer].append((2,freq,bw,const,crh,crl,guard,transm,hierarchy,inv))
#def buildTerTransponder(frequency, inversion = 2, bandwidth = 3, fechigh = 6, feclow = 6,
#modulation = 2, transmission = 2, guard = 4, hierarchy = 4):
for x in list:
if x[0] == 2: #TERRESTRIAL
parm = buildTerTransponder(x[1], x[9], x[2], x[4], x[5], x[3], x[7], x[6], x[8], x[10], x[11])
tlist.append(parm)
cable_bands = {
"DVBC_BAND_EU_VHF_I" : 1 << 0,
"DVBC_BAND_EU_MID" : 1 << 1,
"DVBC_BAND_EU_VHF_III" : 1 << 2,
"DVBC_BAND_EU_SUPER" : 1 << 3,
"DVBC_BAND_EU_HYPER" : 1 << 4,
"DVBC_BAND_EU_UHF_IV" : 1 << 5,
"DVBC_BAND_EU_UHF_V" : 1 << 6,
"DVBC_BAND_US_LO" : 1 << 7,
"DVBC_BAND_US_MID" : 1 << 8,
"DVBC_BAND_US_HI" : 1 << 9,
"DVBC_BAND_US_SUPER" : 1 << 10,
"DVBC_BAND_US_HYPER" : 1 << 11,
}
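# Example (editor's note): band flags combine by bitwise OR, e.g. selecting
# EU VHF I and EU MID yields
#   cable_bands["DVBC_BAND_EU_VHF_I"] | cable_bands["DVBC_BAND_EU_MID"]  # == 3
# which is the integer appended after "--scan-bands" further below.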
class CableTransponderSearchSupport:
# def setCableTransponderSearchResult(self, tlist):
# pass
# def cableTransponderSearchFinished(self):
# pass
def __init__(self):
pass
def tryGetRawFrontend(self, feid):
res_mgr = eDVBResourceManager.getInstance()
if res_mgr:
			raw_channel = res_mgr.allocateRawChannel(feid) # use the frontend id passed in, not self.feid
if raw_channel:
frontend = raw_channel.getFrontend()
if frontend:
frontend.closeFrontend() # immediate close...
del frontend
del raw_channel
return True
return False
def cableTransponderSearchSessionClosed(self, *val):
print "cableTransponderSearchSessionClosed, val", val
self.cable_search_container.appClosed.remove(self.cableTransponderSearchClosed)
self.cable_search_container.dataAvail.remove(self.getCableTransponderData)
if val and len(val):
if val[0]:
self.setCableTransponderSearchResult(self.__tlist)
else:
self.cable_search_container.sendCtrlC()
self.setCableTransponderSearchResult(None)
self.cable_search_container = None
self.cable_search_session = None
self.__tlist = None
self.cableTransponderSearchFinished()
def cableTransponderSearchClosed(self, retval):
print "cableTransponderSearch finished", retval
self.cable_search_session.close(True)
def getCableTransponderData(self, str):
#prepend any remaining data from the previous call
str = self.remainingdata + str
#split in lines
lines = str.split('\n')
#'str' should end with '\n', so when splitting, the last line should be empty. If this is not the case, we received an incomplete line
if len(lines[-1]):
#remember this data for next time
self.remainingdata = lines[-1]
lines = lines[0:-1]
else:
self.remainingdata = ""
for line in lines:
data = line.split()
if len(data):
if data[0] == 'OK':
print str
parm = eDVBFrontendParametersCable()
qam = { "QAM16" : parm.Modulation_QAM16,
"QAM32" : parm.Modulation_QAM32,
"QAM64" : parm.Modulation_QAM64,
"QAM128" : parm.Modulation_QAM128,
"QAM256" : parm.Modulation_QAM256 }
inv = { "INVERSION_OFF" : parm.Inversion_Off,
"INVERSION_ON" : parm.Inversion_On,
"INVERSION_AUTO" : parm.Inversion_Unknown }
fec = { "FEC_AUTO" : parm.FEC_Auto,
"FEC_1_2" : parm.FEC_1_2,
"FEC_2_3" : parm.FEC_2_3,
"FEC_3_4" : parm.FEC_3_4,
"FEC_5_6" : parm.FEC_5_6,
"FEC_7_8" : parm.FEC_7_8,
"FEC_8_9" : parm.FEC_8_9,
"FEC_3_5" : parm.FEC_3_5,
"FEC_4_5" : parm.FEC_4_5,
"FEC_9_10" : parm.FEC_9_10,
"FEC_NONE" : parm.FEC_None }
parm.frequency = int(data[1])
parm.symbol_rate = int(data[2])
parm.fec_inner = fec[data[3]]
parm.modulation = qam[data[4]]
parm.inversion = inv[data[5]]
self.__tlist.append(parm)
tmpstr = _("Try to find used transponders in cable network.. please wait...")
tmpstr += "\n\n"
tmpstr += data[1].isdigit() and "%s MHz " % (int(data[1]) / 1000.) or data[1]
tmpstr += data[0]
self.cable_search_session["text"].setText(tmpstr)
def startCableTransponderSearch(self, nim_idx):
def GetCommand(nimIdx):
_supportNimType = { 'SSH108':'ssh108' }
_nimSocket = {}
fp = file('/proc/bus/nim_sockets')
sNo, sName = -1, ""
for line in fp:
line = line.strip()
if line.startswith('NIM Socket'):
try: sNo = line.split()[2][:-1]
except: sNo = -1
elif line.startswith('Name:'):
try: sName = line.split()[3][4:-1]
except: sName = ""
if sNo >= 0 and sName != "":
_nimSocket[sNo] = sName
sNo, sName = -1, ''
fp.close()
print 'parsed nim_sockets :', _nimSocket
try:
sName = _nimSocket[str(nimIdx)]
sType = _supportNimType[sName]
return sType
except: pass
return 'tda1002x'
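		# (editor's note) GetCommand maps the tuner name parsed out of
		# /proc/bus/nim_sockets to a matching blindscan binary and falls back
		# to 'tda1002x'; the exact /proc layout is firmware specific.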
if not self.tryGetRawFrontend(nim_idx):
self.session.nav.stopService()
if not self.tryGetRawFrontend(nim_idx):
if self.session.pipshown:
self.session.infobar.showPiP()
if not self.tryGetRawFrontend(nim_idx):
self.cableTransponderSearchFinished()
return
self.__tlist = [ ]
self.remainingdata = ""
self.cable_search_container = eConsoleAppContainer()
self.cable_search_container.appClosed.append(self.cableTransponderSearchClosed)
self.cable_search_container.dataAvail.append(self.getCableTransponderData)
cableConfig = config.Nims[nim_idx].cable
tunername = nimmanager.getNimName(nim_idx)
try:
bus = nimmanager.getI2CDevice(nim_idx)
if bus is None:
print "ERROR: could not get I2C device for nim", nim_idx, "for cable transponder search"
bus = 2
except:
# older API
if nim_idx < 2:
if HardwareInfo().get_device_name() == "dm500hd":
bus = 2
else:
bus = nim_idx
else:
if nim_idx == 2:
bus = 2 # DM8000 first nim is /dev/i2c/2
else:
					bus = 4 # DM8000 second nim is /dev/i2c/4
bin_name = None
if tunername == "CXD1981":
cmd = "cxd1978 --init --scan --verbose --wakeup --inv 2 --bus %d" % bus
elif tunername.startswith("Sundtek"):
cmd = "mediaclient --blindscan %d" % nim_idx
else:
bin_name = GetCommand(nim_idx)
cmd = "%(BIN_NAME)s --init --scan --verbose --wakeup --inv 2 --bus %(BUS)d" % {'BIN_NAME':bin_name , 'BUS':bus}
if cableConfig.scan_type.value == "bands":
cmd += " --scan-bands "
bands = 0
if cableConfig.scan_band_EU_VHF_I.value:
bands |= cable_bands["DVBC_BAND_EU_VHF_I"]
if cableConfig.scan_band_EU_MID.value:
bands |= cable_bands["DVBC_BAND_EU_MID"]
if cableConfig.scan_band_EU_VHF_III.value:
bands |= cable_bands["DVBC_BAND_EU_VHF_III"]
if cableConfig.scan_band_EU_UHF_IV.value:
bands |= cable_bands["DVBC_BAND_EU_UHF_IV"]
if cableConfig.scan_band_EU_UHF_V.value:
bands |= cable_bands["DVBC_BAND_EU_UHF_V"]
if cableConfig.scan_band_EU_SUPER.value:
bands |= cable_bands["DVBC_BAND_EU_SUPER"]
if cableConfig.scan_band_EU_HYPER.value:
bands |= cable_bands["DVBC_BAND_EU_HYPER"]
if cableConfig.scan_band_US_LOW.value:
bands |= cable_bands["DVBC_BAND_US_LO"]
if cableConfig.scan_band_US_MID.value:
bands |= cable_bands["DVBC_BAND_US_MID"]
if cableConfig.scan_band_US_HIGH.value:
bands |= cable_bands["DVBC_BAND_US_HI"]
if cableConfig.scan_band_US_SUPER.value:
bands |= cable_bands["DVBC_BAND_US_SUPER"]
if cableConfig.scan_band_US_HYPER.value:
bands |= cable_bands["DVBC_BAND_US_HYPER"]
cmd += str(bands)
else:
cmd += " --scan-stepsize "
cmd += str(cableConfig.scan_frequency_steps.value)
if cableConfig.scan_mod_qam16.value:
cmd += " --mod 16"
if cableConfig.scan_mod_qam32.value:
cmd += " --mod 32"
if cableConfig.scan_mod_qam64.value:
cmd += " --mod 64"
if cableConfig.scan_mod_qam128.value:
cmd += " --mod 128"
if cableConfig.scan_mod_qam256.value:
cmd += " --mod 256"
if cableConfig.scan_sr_6900.value:
cmd += " --sr 6900000"
if cableConfig.scan_sr_6875.value:
cmd += " --sr 6875000"
if cableConfig.scan_sr_ext1.value > 450:
cmd += " --sr "
cmd += str(cableConfig.scan_sr_ext1.value)
cmd += "000"
if cableConfig.scan_sr_ext2.value > 450:
cmd += " --sr "
cmd += str(cableConfig.scan_sr_ext2.value)
cmd += "000"
print bin_name, " CMD is", cmd
self.cable_search_container.execute(cmd)
tmpstr = _("Try to find used transponders in cable network.. please wait...")
tmpstr += "\n\n..."
self.cable_search_session = self.session.openWithCallback(self.cableTransponderSearchSessionClosed, MessageBox, tmpstr, MessageBox.TYPE_INFO)
class ScanSetup(ConfigListScreen, Screen, CableTransponderSearchSupport):
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Manual Scan"))
self.finished_cb = None
self.updateSatList()
self.service = session.nav.getCurrentService()
self.feinfo = None
self.networkid = 0
frontendData = None
if self.service is not None:
self.feinfo = self.service.frontendInfo()
frontendData = self.feinfo and self.feinfo.getAll(True)
self.ter_channel_input = False
self.ter_tnumber = None
self.createConfig(frontendData)
del self.feinfo
del self.service
self.session.postScanService = session.nav.getCurrentlyPlayingServiceOrGroup()
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Scan"))
self["actions"] = NumberActionMap(["SetupActions", "MenuActions", "ColorActions"],
{
"ok": self.keyGo,
"save": self.keyGo,
"cancel": self.keyCancel,
"red": self.keyCancel,
"green": self.keyGo,
"menu": self.doCloseRecursive,
}, -2)
self.statusTimer = eTimer()
self.statusTimer.callback.append(self.updateStatus)
#self.statusTimer.start(5000, True)
self.list = []
ConfigListScreen.__init__(self, self.list)
self["header"] = Label(_("Manual Scan"))
if not self.scan_nims.value == "":
self.createSetup()
self["introduction"] = Label(_("Press OK to start the scan"))
else:
self["introduction"] = Label(_("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."))
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.keyGo()
def updateSatList(self):
self.satList = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-S"):
self.satList.append(nimmanager.getSatListForNim(slot.slot))
else:
self.satList.append(None)
def createSetup(self):
self.list = []
self.multiscanlist = []
index_to_scan = int(self.scan_nims.value)
print "ID: ", index_to_scan
self.tunerEntry = getConfigListEntry(_("Tuner"), self.scan_nims)
self.list.append(self.tunerEntry)
if self.scan_nims == [ ]:
return
self.typeOfScanEntry = None
self.typeOfInputEntry = None
self.systemEntry = None
self.modulationEntry = None
self.preDefSatList = None
self.TerrestrialTransponders = None
self.TerrestrialRegionEntry = None
nim = nimmanager.nim_slots[index_to_scan]
if nim.isCompatible("DVB-S"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_type)
self.list.append(self.typeOfScanEntry)
elif nim.isCompatible("DVB-C"):
if config.Nims[index_to_scan].cable.scan_type.value != "provider": # only show predefined transponder if in provider mode
if self.scan_typecable.value == "predefined_transponder":
self.scan_typecable.value = self.cable_toggle[self.last_scan_typecable]
self.last_scan_typecable = self.scan_typecable.value
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_typecable)
self.list.append(self.typeOfScanEntry)
elif nim.isCompatible("DVB-T"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_typeterrestrial)
self.list.append(self.typeOfScanEntry)
if self.scan_typeterrestrial.value == "single_transponder":
self.typeOfInputEntry = getConfigListEntry(_("Use frequency or channel"), self.scan_input_as)
if self.ter_channel_input:
self.list.append(self.typeOfInputEntry)
else:
self.scan_input_as.value = self.scan_input_as.choices[0]
self.scan_networkScan.value = False
if nim.isCompatible("DVB-S"):
if self.scan_type.value == "single_transponder":
self.updateSatList()
if nim.isCompatible("DVB-S2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_sat.system)
self.list.append(self.systemEntry)
else:
# downgrade to dvb-s, in case a -s2 config was active
self.scan_sat.system.value = eDVBFrontendParametersSatellite.System_DVB_S
self.list.append(getConfigListEntry(_('Satellite'), self.scan_satselection[index_to_scan]))
self.list.append(getConfigListEntry(_('Frequency'), self.scan_sat.frequency))
self.list.append(getConfigListEntry(_('Inversion'), self.scan_sat.inversion))
self.list.append(getConfigListEntry(_('Symbol rate'), self.scan_sat.symbolrate))
self.list.append(getConfigListEntry(_('Polarization'), self.scan_sat.polarization))
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec))
elif self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec_s2))
self.modulationEntry = getConfigListEntry(_('Modulation'), self.scan_sat.modulation)
self.list.append(self.modulationEntry)
self.list.append(getConfigListEntry(_('Roll-off'), self.scan_sat.rolloff))
self.list.append(getConfigListEntry(_('Pilot'), self.scan_sat.pilot))
elif self.scan_type.value == "predefined_transponder" and self.satList[index_to_scan]:
self.updateSatList()
self.preDefSatList = getConfigListEntry(_('Satellite'), self.scan_satselection[index_to_scan])
self.list.append(self.preDefSatList)
sat = self.satList[index_to_scan][self.scan_satselection[index_to_scan].index]
self.predefinedTranspondersList(sat[0])
self.list.append(getConfigListEntry(_('Transponder'), self.preDefTransponders))
elif self.scan_type.value == "single_satellite":
self.updateSatList()
print self.scan_satselection[index_to_scan]
self.list.append(getConfigListEntry(_("Satellite"), self.scan_satselection[index_to_scan]))
self.scan_networkScan.value = True
elif "multisat" in self.scan_type.value:
tlist = []
SatList = nimmanager.getSatListForNim(index_to_scan)
for x in SatList:
if self.Satexists(tlist, x[0]) == 0:
tlist.append(x[0])
sat = ConfigEnableDisable(default = "_yes" in self.scan_type.value and True or False)
configEntry = getConfigListEntry(nimmanager.getSatDescription(x[0]), sat)
self.list.append(configEntry)
self.multiscanlist.append((x[0], sat))
self.scan_networkScan.value = True
elif nim.isCompatible("DVB-C"):
if self.scan_typecable.value == "single_transponder":
self.list.append(getConfigListEntry(_("Frequency"), self.scan_cab.frequency))
self.list.append(getConfigListEntry(_("Inversion"), self.scan_cab.inversion))
self.list.append(getConfigListEntry(_("Symbol rate"), self.scan_cab.symbolrate))
self.list.append(getConfigListEntry(_("Modulation"), self.scan_cab.modulation))
self.list.append(getConfigListEntry(_("FEC"), self.scan_cab.fec))
elif self.scan_typecable.value == "predefined_transponder":
self.predefinedCabTranspondersList()
self.list.append(getConfigListEntry(_('Transponder'), self.CableTransponders))
if config.Nims[index_to_scan].cable.scan_networkid.value:
self.networkid = config.Nims[index_to_scan].cable.scan_networkid.value
self.scan_networkScan.value = True
elif nim.isCompatible("DVB-T"):
if self.scan_typeterrestrial.value == "single_transponder":
if nim.isCompatible("DVB-T2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_ter.system)
self.list.append(self.systemEntry)
else:
self.scan_ter.system.value = eDVBFrontendParametersTerrestrial.System_DVB_T
if self.ter_channel_input and self.scan_input_as.value == "channel":
channel = channelnumbers.getChannelNumber(self.scan_ter.frequency.value*1000, self.ter_tnumber)
if channel:
self.scan_ter.channel.value = int(channel.replace("+","").replace("-",""))
self.list.append(getConfigListEntry(_("Channel"), self.scan_ter.channel))
else:
prev_val = self.scan_ter.frequency.value
self.scan_ter.frequency.value = channelnumbers.channel2frequency(self.scan_ter.channel.value, self.ter_tnumber)/1000
if self.scan_ter.frequency.value == 474000:
self.scan_ter.frequency.value = prev_val
self.list.append(getConfigListEntry(_("Frequency"), self.scan_ter.frequency))
self.list.append(getConfigListEntry(_("Inversion"), self.scan_ter.inversion))
self.list.append(getConfigListEntry(_("Bandwidth"), self.scan_ter.bandwidth))
self.list.append(getConfigListEntry(_("Code rate HP"), self.scan_ter.fechigh))
self.list.append(getConfigListEntry(_("Code rate LP"), self.scan_ter.feclow))
self.list.append(getConfigListEntry(_("Modulation"), self.scan_ter.modulation))
self.list.append(getConfigListEntry(_("Transmission mode"), self.scan_ter.transmission))
self.list.append(getConfigListEntry(_("Guard interval"), self.scan_ter.guard))
self.list.append(getConfigListEntry(_("Hierarchy info"), self.scan_ter.hierarchy))
if self.scan_ter.system.value == eDVBFrontendParametersTerrestrial.System_DVB_T2:
self.list.append(getConfigListEntry(_('PLP ID'), self.scan_ter.plp_id))
elif self.scan_typeterrestrial.value == "predefined_transponder":
if nim.isCompatible("DVB-T2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_ter.system)
self.list.append(self.systemEntry)
else:
self.scan_ter.system.value = eDVBFrontendParametersTerrestrial.System_DVB_T
self.TerrestrialRegion = self.terrestrial_nims_regions[index_to_scan]
self.TerrestrialRegionEntry = getConfigListEntry(_('Region'), self.TerrestrialRegion)
self.list.append(self.TerrestrialRegionEntry)
self.predefinedTerrTranspondersList()
self.list.append(getConfigListEntry(_('Transponder'), self.TerrestrialTransponders))
elif self.scan_typeterrestrial.value == "complete":
self.TerrestrialRegion = self.terrestrial_nims_regions[index_to_scan]
self.TerrestrialRegionEntry = getConfigListEntry(_('Region'), self.TerrestrialRegion)
self.list.append(self.TerrestrialRegionEntry)
self.list.append(getConfigListEntry(_("Network scan"), self.scan_networkScan))
self.list.append(getConfigListEntry(_("Clear before scan"), self.scan_clearallservices))
self.list.append(getConfigListEntry(_("Only free scan"), self.scan_onlyfree))
self["config"].list = self.list
self["config"].l.setList(self.list)
def Satexists(self, tlist, pos):
for x in tlist:
if x == pos:
return 1
return 0
def newConfig(self):
cur = self["config"].getCurrent()
print "cur is", cur
if cur == self.typeOfScanEntry or \
cur == self.typeOfInputEntry or \
cur == self.tunerEntry or \
cur == self.systemEntry or \
cur == self.preDefSatList or \
cur == self.TerrestrialRegionEntry or \
(self.modulationEntry and self.systemEntry[1].value == eDVBFrontendParametersSatellite.System_DVB_S2 and cur == self.modulationEntry):
self.createSetup()
def createConfig(self, frontendData):
defaultSat = {
"orbpos": 192,
"system": eDVBFrontendParametersSatellite.System_DVB_S,
"frequency": 11836,
"inversion": eDVBFrontendParametersSatellite.Inversion_Unknown,
"symbolrate": 27500,
"polarization": eDVBFrontendParametersSatellite.Polarisation_Horizontal,
"fec": eDVBFrontendParametersSatellite.FEC_Auto,
"fec_s2": eDVBFrontendParametersSatellite.FEC_9_10,
"modulation": eDVBFrontendParametersSatellite.Modulation_QPSK }
defaultCab = {
"frequency": 466,
"inversion": eDVBFrontendParametersCable.Inversion_Unknown,
"modulation": eDVBFrontendParametersCable.Modulation_QAM64,
"fec": eDVBFrontendParametersCable.FEC_Auto,
"symbolrate": 6900,
"system": eDVBFrontendParametersCable.System_DVB_C_ANNEX_A }
defaultTer = {
"frequency" : 474000,
"inversion" : eDVBFrontendParametersTerrestrial.Inversion_Unknown,
"bandwidth" : 8000000,
"fechigh" : eDVBFrontendParametersTerrestrial.FEC_Auto,
"feclow" : eDVBFrontendParametersTerrestrial.FEC_Auto,
"modulation" : eDVBFrontendParametersTerrestrial.Modulation_Auto,
"transmission_mode" : eDVBFrontendParametersTerrestrial.TransmissionMode_Auto,
"guard_interval" : eDVBFrontendParametersTerrestrial.GuardInterval_Auto,
"hierarchy": eDVBFrontendParametersTerrestrial.Hierarchy_Auto,
"system": eDVBFrontendParametersTerrestrial.System_DVB_T,
"plp_id": 0 }
if frontendData is not None:
ttype = frontendData.get("tuner_type", "UNKNOWN")
if ttype == "DVB-S":
defaultSat["system"] = frontendData.get("system", eDVBFrontendParametersSatellite.System_DVB_S)
defaultSat["frequency"] = frontendData.get("frequency", 0) / 1000
defaultSat["inversion"] = frontendData.get("inversion", eDVBFrontendParametersSatellite.Inversion_Unknown)
defaultSat["symbolrate"] = frontendData.get("symbol_rate", 0) / 1000
defaultSat["polarization"] = frontendData.get("polarization", eDVBFrontendParametersSatellite.Polarisation_Horizontal)
if defaultSat["system"] == eDVBFrontendParametersSatellite.System_DVB_S2:
defaultSat["fec_s2"] = frontendData.get("fec_inner", eDVBFrontendParametersSatellite.FEC_Auto)
defaultSat["rolloff"] = frontendData.get("rolloff", eDVBFrontendParametersSatellite.RollOff_alpha_0_35)
defaultSat["pilot"] = frontendData.get("pilot", eDVBFrontendParametersSatellite.Pilot_Unknown)
else:
defaultSat["fec"] = frontendData.get("fec_inner", eDVBFrontendParametersSatellite.FEC_Auto)
defaultSat["modulation"] = frontendData.get("modulation", eDVBFrontendParametersSatellite.Modulation_QPSK)
defaultSat["orbpos"] = frontendData.get("orbital_position", 0)
elif ttype == "DVB-C":
defaultCab["frequency"] = frontendData.get("frequency", 0) / 1000
defaultCab["symbolrate"] = frontendData.get("symbol_rate", 0) / 1000
defaultCab["inversion"] = frontendData.get("inversion", eDVBFrontendParametersCable.Inversion_Unknown)
defaultCab["fec"] = frontendData.get("fec_inner", eDVBFrontendParametersCable.FEC_Auto)
defaultCab["modulation"] = frontendData.get("modulation", eDVBFrontendParametersCable.Modulation_QAM16)
defaultCab["system"] = frontendData.get("system", eDVBFrontendParametersCable.System_DVB_C_ANNEX_A)
elif ttype == "DVB-T":
defaultTer["frequency"] = frontendData.get("frequency", 47400000) / 1000
defaultTer["inversion"] = frontendData.get("inversion", eDVBFrontendParametersTerrestrial.Inversion_Unknown)
defaultTer["bandwidth"] = frontendData.get("bandwidth", 8000000)
defaultTer["fechigh"] = frontendData.get("code_rate_hp", eDVBFrontendParametersTerrestrial.FEC_Auto)
defaultTer["feclow"] = frontendData.get("code_rate_lp", eDVBFrontendParametersTerrestrial.FEC_Auto)
defaultTer["modulation"] = frontendData.get("constellation", eDVBFrontendParametersTerrestrial.Modulation_Auto)
defaultTer["transmission_mode"] = frontendData.get("transmission_mode", eDVBFrontendParametersTerrestrial.TransmissionMode_Auto)
defaultTer["guard_interval"] = frontendData.get("guard_interval", eDVBFrontendParametersTerrestrial.GuardInterval_Auto)
defaultTer["hierarchy"] = frontendData.get("hierarchy_information", eDVBFrontendParametersTerrestrial.Hierarchy_Auto)
defaultTer["system"] = frontendData.get("system", eDVBFrontendParametersTerrestrial.System_DVB_T)
defaultTer["plp_id"] = frontendData.get("plp_id", 0)
self.scan_sat = ConfigSubsection()
self.scan_cab = ConfigSubsection()
self.scan_ter = ConfigSubsection()
nim_list = []
# collect all nims which are *not* set to "nothing"
for n in nimmanager.nim_slots:
if n.config_mode == "nothing":
continue
if n.config_mode == "advanced" and len(nimmanager.getSatListForNim(n.slot)) < 1:
continue
if n.config_mode in ("loopthrough", "satposdepends"):
root_id = nimmanager.sec.getRoot(n.slot_id, int(n.config.connectedTo.value))
if n.type == nimmanager.nim_slots[root_id].type: # check if connected from a DVB-S to DVB-S2 Nim or vice versa
continue
nim_list.append((str(n.slot), n.friendly_full_description))
self.scan_nims = ConfigSelection(choices = nim_list)
if frontendData is not None and len(nim_list) > 0:
self.scan_nims.setValue(str(frontendData.get("tuner_number", nim_list[0][0])))
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-T"):
self.ter_tnumber = slot.slot
if self.ter_tnumber is not None:
self.ter_channel_input = channelnumbers.supportedChannels(self.ter_tnumber)
# status
self.scan_snr = ConfigSlider()
self.scan_snr.enabled = False
self.scan_agc = ConfigSlider()
self.scan_agc.enabled = False
self.scan_ber = ConfigSlider()
self.scan_ber.enabled = False
# sat
self.scan_sat.system = ConfigSelection(default = defaultSat["system"], choices = [
(eDVBFrontendParametersSatellite.System_DVB_S, _("DVB-S")),
(eDVBFrontendParametersSatellite.System_DVB_S2, _("DVB-S2"))])
self.scan_sat.frequency = ConfigInteger(default = defaultSat["frequency"], limits = (1, 99999))
self.scan_sat.inversion = ConfigSelection(default = defaultSat["inversion"], choices = [
(eDVBFrontendParametersSatellite.Inversion_Off, _("Off")),
(eDVBFrontendParametersSatellite.Inversion_On, _("On")),
(eDVBFrontendParametersSatellite.Inversion_Unknown, _("Auto"))])
self.scan_sat.symbolrate = ConfigInteger(default = defaultSat["symbolrate"], limits = (1, 99999))
self.scan_sat.polarization = ConfigSelection(default = defaultSat["polarization"], choices = [
(eDVBFrontendParametersSatellite.Polarisation_Horizontal, _("horizontal")),
(eDVBFrontendParametersSatellite.Polarisation_Vertical, _("vertical")),
(eDVBFrontendParametersSatellite.Polarisation_CircularLeft, _("circular left")),
(eDVBFrontendParametersSatellite.Polarisation_CircularRight, _("circular right"))])
self.scan_sat.fec = ConfigSelection(default = defaultSat["fec"], choices = [
(eDVBFrontendParametersSatellite.FEC_Auto, _("Auto")),
(eDVBFrontendParametersSatellite.FEC_1_2, "1/2"),
(eDVBFrontendParametersSatellite.FEC_2_3, "2/3"),
(eDVBFrontendParametersSatellite.FEC_3_4, "3/4"),
(eDVBFrontendParametersSatellite.FEC_5_6, "5/6"),
(eDVBFrontendParametersSatellite.FEC_7_8, "7/8"),
(eDVBFrontendParametersSatellite.FEC_None, _("None"))])
self.scan_sat.fec_s2 = ConfigSelection(default = defaultSat["fec_s2"], choices = [
(eDVBFrontendParametersSatellite.FEC_1_2, "1/2"),
(eDVBFrontendParametersSatellite.FEC_2_3, "2/3"),
(eDVBFrontendParametersSatellite.FEC_3_4, "3/4"),
(eDVBFrontendParametersSatellite.FEC_3_5, "3/5"),
(eDVBFrontendParametersSatellite.FEC_4_5, "4/5"),
(eDVBFrontendParametersSatellite.FEC_5_6, "5/6"),
(eDVBFrontendParametersSatellite.FEC_7_8, "7/8"),
(eDVBFrontendParametersSatellite.FEC_8_9, "8/9"),
(eDVBFrontendParametersSatellite.FEC_9_10, "9/10")])
self.scan_sat.modulation = ConfigSelection(default = defaultSat["modulation"], choices = [
(eDVBFrontendParametersSatellite.Modulation_QPSK, "QPSK"),
(eDVBFrontendParametersSatellite.Modulation_8PSK, "8PSK")])
self.scan_sat.rolloff = ConfigSelection(default = defaultSat.get("rolloff", eDVBFrontendParametersSatellite.RollOff_alpha_0_35), choices = [
(eDVBFrontendParametersSatellite.RollOff_alpha_0_35, "0.35"),
(eDVBFrontendParametersSatellite.RollOff_alpha_0_25, "0.25"),
(eDVBFrontendParametersSatellite.RollOff_alpha_0_20, "0.20"),
(eDVBFrontendParametersSatellite.RollOff_auto, _("Auto"))])
self.scan_sat.pilot = ConfigSelection(default = defaultSat.get("pilot", eDVBFrontendParametersSatellite.Pilot_Unknown), choices = [
(eDVBFrontendParametersSatellite.Pilot_Off, _("Off")),
(eDVBFrontendParametersSatellite.Pilot_On, _("On")),
(eDVBFrontendParametersSatellite.Pilot_Unknown, _("Auto"))])
# cable
self.scan_cab.frequency = ConfigInteger(default = defaultCab["frequency"], limits = (50, 999))
self.scan_cab.inversion = ConfigSelection(default = defaultCab["inversion"], choices = [
(eDVBFrontendParametersCable.Inversion_Off, _("Off")),
(eDVBFrontendParametersCable.Inversion_On, _("On")),
(eDVBFrontendParametersCable.Inversion_Unknown, _("Auto"))])
self.scan_cab.modulation = ConfigSelection(default = defaultCab["modulation"], choices = [
(eDVBFrontendParametersCable.Modulation_QAM16, "16-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM32, "32-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM64, "64-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM128, "128-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM256, "256-QAM")])
self.scan_cab.fec = ConfigSelection(default = defaultCab["fec"], choices = [
(eDVBFrontendParametersCable.FEC_Auto, _("Auto")),
(eDVBFrontendParametersCable.FEC_1_2, "1/2"),
(eDVBFrontendParametersCable.FEC_2_3, "2/3"),
(eDVBFrontendParametersCable.FEC_3_4, "3/4"),
(eDVBFrontendParametersCable.FEC_5_6, "5/6"),
(eDVBFrontendParametersCable.FEC_7_8, "7/8"),
(eDVBFrontendParametersCable.FEC_8_9, "8/9"),
(eDVBFrontendParametersCable.FEC_3_5, "3/5"),
(eDVBFrontendParametersCable.FEC_4_5, "4/5"),
(eDVBFrontendParametersCable.FEC_9_10, "9/10"),
(eDVBFrontendParametersCable.FEC_None, _("None"))])
self.scan_cab.symbolrate = ConfigInteger(default = defaultCab["symbolrate"], limits = (1, 99999))
self.scan_cab.system = ConfigSelection(default = defaultCab["system"], choices = [
(eDVBFrontendParametersCable.System_DVB_C_ANNEX_A, _("DVB-C")),
(eDVBFrontendParametersCable.System_DVB_C_ANNEX_C, _("DVB-C ANNEX C"))])
		# terrestrial
self.scan_ter.frequency = ConfigInteger(default = defaultTer["frequency"], limits = (50000, 999000))
self.scan_ter.channel = ConfigInteger(default = 21, limits = (1, 99))
self.scan_ter.inversion = ConfigSelection(default = defaultTer["inversion"], choices = [
(eDVBFrontendParametersTerrestrial.Inversion_Off, _("Off")),
(eDVBFrontendParametersTerrestrial.Inversion_On, _("On")),
(eDVBFrontendParametersTerrestrial.Inversion_Unknown, _("Auto"))])
# WORKAROUND: we can't use BW-auto
self.scan_ter.bandwidth = ConfigSelection(default = defaultTer["bandwidth"], choices = [
(1712000, "1.712MHz"),
(5000000, "5MHz"),
(6000000, "6MHz"),
(7000000, "7MHz"),
(8000000, "8MHz"),
(10000000,"10MHz")
])
#, (eDVBFrontendParametersTerrestrial.Bandwidth_Auto, _("Auto"))))
self.scan_ter.fechigh = ConfigSelection(default = defaultTer["fechigh"], choices = [
(eDVBFrontendParametersTerrestrial.FEC_1_2, "1/2"),
(eDVBFrontendParametersTerrestrial.FEC_2_3, "2/3"),
(eDVBFrontendParametersTerrestrial.FEC_3_4, "3/4"),
(eDVBFrontendParametersTerrestrial.FEC_5_6, "5/6"),
(eDVBFrontendParametersTerrestrial.FEC_6_7, "6/7"),
(eDVBFrontendParametersTerrestrial.FEC_7_8, "7/8"),
(eDVBFrontendParametersTerrestrial.FEC_8_9, "8/9"),
(eDVBFrontendParametersTerrestrial.FEC_Auto, _("Auto"))])
self.scan_ter.feclow = ConfigSelection(default = defaultTer["feclow"], choices = [
(eDVBFrontendParametersTerrestrial.FEC_1_2, "1/2"),
(eDVBFrontendParametersTerrestrial.FEC_2_3, "2/3"),
(eDVBFrontendParametersTerrestrial.FEC_3_4, "3/4"),
(eDVBFrontendParametersTerrestrial.FEC_5_6, "5/6"),
(eDVBFrontendParametersTerrestrial.FEC_6_7, "6/7"),
(eDVBFrontendParametersTerrestrial.FEC_7_8, "7/8"),
(eDVBFrontendParametersTerrestrial.FEC_8_9, "8/9"),
(eDVBFrontendParametersTerrestrial.FEC_Auto, _("Auto"))])
self.scan_ter.modulation = ConfigSelection(default = defaultTer["modulation"], choices = [
(eDVBFrontendParametersTerrestrial.Modulation_QPSK, "QPSK"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM16, "QAM16"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM64, "QAM64"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM256, "QAM256"),
(eDVBFrontendParametersTerrestrial.Modulation_Auto, _("Auto"))])
self.scan_ter.transmission = ConfigSelection(default = defaultTer["transmission_mode"], choices = [
(eDVBFrontendParametersTerrestrial.TransmissionMode_1k, "1K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_2k, "2K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_4k, "4K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_8k, "8K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_16k, "16K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_32k, "32K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_Auto, _("Auto"))])
self.scan_ter.guard = ConfigSelection(default = defaultTer["guard_interval"], choices = [
(eDVBFrontendParametersTerrestrial.GuardInterval_1_32, "1/32"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_16, "1/16"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_8, "1/8"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_4, "1/4"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_128, "1/128"),
(eDVBFrontendParametersTerrestrial.GuardInterval_19_128, "19/128"),
(eDVBFrontendParametersTerrestrial.GuardInterval_19_256, "19/256"),
(eDVBFrontendParametersTerrestrial.GuardInterval_Auto, _("Auto"))])
self.scan_ter.hierarchy = ConfigSelection(default = defaultTer["hierarchy"], choices = [
(eDVBFrontendParametersTerrestrial.Hierarchy_None, _("None")),
(eDVBFrontendParametersTerrestrial.Hierarchy_1, "1"),
(eDVBFrontendParametersTerrestrial.Hierarchy_2, "2"),
(eDVBFrontendParametersTerrestrial.Hierarchy_4, "4"),
(eDVBFrontendParametersTerrestrial.Hierarchy_Auto, _("Auto"))])
self.scan_ter.system = ConfigSelection(default = defaultTer["system"], choices = [
(eDVBFrontendParametersTerrestrial.System_DVB_T, _("DVB-T")),
(eDVBFrontendParametersTerrestrial.System_DVB_T2, _("DVB-T2"))])
self.scan_ter.plp_id = ConfigInteger(default = defaultTer["plp_id"], limits = (0, 255))
self.scan_scansat = {}
for sat in nimmanager.satList:
#print sat[1]
self.scan_scansat[sat[0]] = ConfigYesNo(default = False)
self.scan_satselection = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-S"):
self.scan_satselection.append(getConfigSatlist(defaultSat["orbpos"], self.satList[slot.slot]))
else:
self.scan_satselection.append(None)
self.terrestrial_nims_regions = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-T"):
self.terrestrial_nims_regions.append(self.getTerrestrialRegionsList(slot.slot))
else:
self.terrestrial_nims_regions.append(None)
if frontendData is not None and ttype == "DVB-S" and self.predefinedTranspondersList(defaultSat["orbpos"]) is not None:
defaultSatSearchType = "predefined_transponder"
else:
defaultSatSearchType = "single_transponder"
if frontendData is not None and ttype == "DVB-T" and self.predefinedTerrTranspondersList() is not None:
defaultTerrSearchType = "predefined_transponder"
else:
defaultTerrSearchType = "single_transponder"
if frontendData is not None and ttype == "DVB-C" and self.predefinedCabTranspondersList() is not None:
defaultCabSearchType = "predefined_transponder"
else:
defaultCabSearchType = "single_transponder"
self.scan_type = ConfigSelection(default = defaultSatSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("single_satellite", _("Single satellite")), ("multisat", _("Multisat")), ("multisat_yes", _("Multisat all select"))])
self.scan_typecable = ConfigSelection(default = defaultCabSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("complete", _("Complete"))])
self.last_scan_typecable = "single_transponder"
self.cable_toggle = {"single_transponder":"complete", "complete":"single_transponder"}
self.scan_typeterrestrial = ConfigSelection(default = defaultTerrSearchType, choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder")), ("complete", _("Complete"))])
self.scan_input_as = ConfigSelection(default = "channel", choices = [("frequency", _("Frequency")), ("channel", _("Channel"))])
self.scan_clearallservices = ConfigSelection(default = "no", choices = [("no", _("no")), ("yes", _("yes")), ("yes_hold_feeds", _("yes (keep feeds)"))])
self.scan_onlyfree = ConfigYesNo(default = False)
self.scan_networkScan = ConfigYesNo(default = False)
return True
def keyLeft(self):
ConfigListScreen.keyLeft(self)
self.newConfig()
def keyRight(self):
ConfigListScreen.keyRight(self)
self.newConfig()
def handleKeyFileCallback(self, answer):
ConfigListScreen.handleKeyFileCallback(self, answer)
self.newConfig()
def updateStatus(self):
print "updatestatus"
def addSatTransponder(self, tlist, frequency, symbol_rate, polarisation, fec, inversion, orbital_position, system, modulation, rolloff, pilot):
print "Add Sat: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(polarisation) + " fec: " + str(fec) + " inversion: " + str(inversion) + " modulation: " + str(modulation) + " system: " + str(system) + " rolloff" + str(rolloff) + " pilot" + str(pilot)
print "orbpos: " + str(orbital_position)
parm = eDVBFrontendParametersSatellite()
parm.modulation = modulation
parm.system = system
parm.frequency = frequency * 1000
parm.symbol_rate = symbol_rate * 1000
parm.polarisation = polarisation
parm.fec = fec
parm.inversion = inversion
parm.orbital_position = orbital_position
parm.rolloff = rolloff
parm.pilot = pilot
tlist.append(parm)
def addCabTransponder(self, tlist, frequency, symbol_rate, modulation, fec, inversion):
print "Add Cab: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(modulation) + " fec: " + str(fec) + " inversion: " + str(inversion)
parm = eDVBFrontendParametersCable()
parm.frequency = frequency
parm.symbol_rate = symbol_rate
parm.modulation = modulation
parm.fec_inner = fec
parm.inversion = inversion
tlist.append(parm)
def addTerTransponder(self, tlist, *args, **kwargs):
tlist.append(buildTerTransponder(*args, **kwargs))
def keyGo(self):
infoBarInstance = InfoBar.instance
if infoBarInstance:
infoBarInstance.checkTimeshiftRunning(self.keyGoCheckTimeshiftCallback)
else:
self.keyGoCheckTimeshiftCallback(True)
def keyGoCheckTimeshiftCallback(self, answer):
if not answer or self.scan_nims.value == "":
return
tlist = []
flags = None
startScan = True
removeAll = True
index_to_scan = int(self.scan_nims.value)
if self.scan_nims == [ ]:
self.session.open(MessageBox, _("No tuner is enabled!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
return
nim = nimmanager.nim_slots[index_to_scan]
print "nim", nim.slot
if nim.isCompatible("DVB-S"):
print "is compatible with DVB-S"
if self.scan_type.value == "single_transponder":
				# these lists are generated for each tuner, so this has to work.
assert len(self.satList) > index_to_scan
assert len(self.scan_satselection) > index_to_scan
nimsats = self.satList[index_to_scan]
selsatidx = self.scan_satselection[index_to_scan].index
# however, the satList itself could be empty. in that case, "index" is 0 (for "None").
if len(nimsats):
orbpos = nimsats[selsatidx][0]
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
fec = self.scan_sat.fec.value
else:
fec = self.scan_sat.fec_s2.value
print "add sat transponder"
self.addSatTransponder(tlist, self.scan_sat.frequency.value,
self.scan_sat.symbolrate.value,
self.scan_sat.polarization.value,
fec,
self.scan_sat.inversion.value,
orbpos,
self.scan_sat.system.value,
self.scan_sat.modulation.value,
self.scan_sat.rolloff.value,
self.scan_sat.pilot.value)
removeAll = False
elif self.scan_type.value == "predefined_transponder":
nimsats = self.satList[index_to_scan]
selsatidx = self.scan_satselection[index_to_scan].index
if len(nimsats):
orbpos = nimsats[selsatidx][0]
tps = nimmanager.getTransponders(orbpos)
if len(tps) and len(tps) > self.preDefTransponders.index:
tp = tps[self.preDefTransponders.index]
self.addSatTransponder(tlist, tp[1] / 1000, tp[2] / 1000, tp[3], tp[4], tp[7], orbpos, tp[5], tp[6], tp[8], tp[9])
removeAll = False
elif self.scan_type.value == "single_satellite":
sat = self.satList[index_to_scan][self.scan_satselection[index_to_scan].index]
getInitialTransponderList(tlist, sat[0])
elif "multisat" in self.scan_type.value:
SatList = nimmanager.getSatListForNim(index_to_scan)
for x in self.multiscanlist:
if x[1].value:
print " " + str(x[0])
getInitialTransponderList(tlist, x[0])
elif nim.isCompatible("DVB-C"):
if self.scan_typecable.value == "single_transponder":
self.addCabTransponder(tlist, self.scan_cab.frequency.value*1000,
self.scan_cab.symbolrate.value*1000,
self.scan_cab.modulation.value,
self.scan_cab.fec.value,
self.scan_cab.inversion.value)
removeAll = False
elif self.scan_typecable.value == "predefined_transponder":
tps = nimmanager.getTranspondersCable(index_to_scan)
if len(tps) and len(tps) > self.CableTransponders.index :
tp = tps[self.CableTransponders.index]
# 0 transponder type, 1 freq, 2 sym, 3 mod, 4 fec, 5 inv, 6 sys
self.addCabTransponder(tlist, tp[1], tp[2], tp[3], tp[4], tp[5])
removeAll = False
elif self.scan_typecable.value == "complete":
if config.Nims[index_to_scan].cable.scan_type.value == "provider":
getInitialCableTransponderList(tlist, index_to_scan)
else:
startScan = False
elif nim.isCompatible("DVB-T"):
if self.scan_typeterrestrial.value == "single_transponder":
if self.scan_input_as.value == "channel":
frequency = channelnumbers.channel2frequency(self.scan_ter.channel.value, self.ter_tnumber)
else:
frequency = self.scan_ter.frequency.value * 1000
self.addTerTransponder(tlist,
frequency,
inversion = self.scan_ter.inversion.value,
bandwidth = self.scan_ter.bandwidth.value,
fechigh = self.scan_ter.fechigh.value,
feclow = self.scan_ter.feclow.value,
modulation = self.scan_ter.modulation.value,
transmission = self.scan_ter.transmission.value,
guard = self.scan_ter.guard.value,
hierarchy = self.scan_ter.hierarchy.value,
system = self.scan_ter.system.value,
plpid = self.scan_ter.plp_id.value)
removeAll = False
elif self.scan_typeterrestrial.value == "predefined_transponder":
if self.TerrestrialTransponders is not None:
region = self.terrestrial_nims_regions[index_to_scan].value
tps = nimmanager.getTranspondersTerrestrial(region)
if len(tps) and len(tps) > self.TerrestrialTransponders.index :
tp = tps[self.TerrestrialTransponders.index]
tlist.append(buildTerTransponder(tp[1], tp[9], tp[2], tp[4], tp[5], tp[3], tp[7], tp[6], tp[8], tp[10], tp[11]))
removeAll = False
elif self.scan_typeterrestrial.value == "complete":
getInitialTerrestrialTransponderList(tlist, self.TerrestrialRegion.value)
flags = self.scan_networkScan.value and eComponentScan.scanNetworkSearch or 0
tmp = self.scan_clearallservices.value
if tmp == "yes":
flags |= eComponentScan.scanRemoveServices
elif tmp == "yes_hold_feeds":
flags |= eComponentScan.scanRemoveServices
flags |= eComponentScan.scanDontRemoveFeeds
if tmp != "no" and not removeAll:
flags |= eComponentScan.scanDontRemoveUnscanned
if self.scan_onlyfree.value:
flags |= eComponentScan.scanOnlyFree
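		# Example (editor's note): "yes (keep feeds)" together with "only free
		# scan" yields flags = scanRemoveServices | scanDontRemoveFeeds |
		# scanOnlyFree (plus scanNetworkSearch when network scan is enabled).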
for x in self["config"].list:
x[1].save()
if startScan:
self.startScan(tlist, flags, index_to_scan, self.networkid)
else:
self.flags = flags
self.feid = index_to_scan
self.tlist = []
self.startCableTransponderSearch(self.feid)
def setCableTransponderSearchResult(self, tlist):
self.tlist = tlist
def cableTransponderSearchFinished(self):
if self.tlist is None:
self.tlist = []
else:
self.startScan(self.tlist, self.flags, self.feid)
def predefinedTranspondersList(self, orbpos):
default = None
if orbpos is not None:
list = []
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
fec = self.scan_sat.fec_s2.value
else:
fec = self.scan_sat.fec.value
compare = [0, self.scan_sat.frequency.value*1000, self.scan_sat.symbolrate.value*1000, self.scan_sat.polarization.value, fec]
i = 0
tps = nimmanager.getTransponders(orbpos)
for tp in tps:
if tp[0] == 0:
if default is None and self.compareTransponders(tp, compare):
default = str(i)
list.append((str(i), self.humanReadableTransponder(tp)))
i += 1
self.preDefTransponders = ConfigSelection(choices = list, default = default)
return default
def humanReadableTransponder(self, tp):
if tp[3] in range (4) and tp[4] in range (11):
pol_list = ['H','V','L','R']
fec_list = ['Auto','1/2','2/3','3/4','5/6','7/8','8/9','3/5','4/5','9/10','None']
return str(tp[1] / 1000) + " " + pol_list[tp[3]] + " " + str(tp[2] / 1000) + " " + fec_list[tp[4]]
return _("Invalid transponder data")
def compareTransponders(self, tp, compare):
frequencyTolerance = 2000 #2 MHz
symbolRateTolerance = 10
return abs(tp[1] - compare[1]) <= frequencyTolerance and abs(tp[2] - compare[2]) <= symbolRateTolerance and tp[3] == compare[3] and (not tp[4] or tp[4] == compare[4])
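		# Example (editor's note): with the 2000 kHz tolerance above, a stored
		# 11836 MHz transponder matches any user frequency in 11834..11838 MHz,
		# provided polarisation matches and the stored FEC is Auto (0) or equal.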
def predefinedTerrTranspondersList(self):
default = None
list = []
compare = [2, self.scan_ter.frequency.value*1000]
i = 0
index_to_scan = int(self.scan_nims.value)
channels = channelnumbers.supportedChannels(index_to_scan)
region = self.terrestrial_nims_regions[index_to_scan].value
tps = nimmanager.getTranspondersTerrestrial(region)
for tp in tps:
if tp[0] == 2: #TERRESTRIAL
channel = ''
if channels:
channel = _(' (Channel %s)') % (channelnumbers.getChannelNumber(tp[1], index_to_scan))
if default is None and self.compareTerrTransponders(tp, compare):
default = str(i)
list.append((str(i), '%s MHz %s' % (str(tp[1] / 1000000), channel)))
i += 1
print "channel", channel
self.TerrestrialTransponders = ConfigSelection(choices = list, default = default)
return default
def compareTerrTransponders(self, tp, compare):
frequencyTolerance = 1000000 #1 MHz
return abs(tp[1] - compare[1]) <= frequencyTolerance
def getTerrestrialRegionsList(self, index_to_scan = None):
default = None
list = []
if index_to_scan is None:
index_to_scan = int(self.scan_nims.value)
defaultRegionForNIM = nimmanager.getTerrestrialDescription(index_to_scan)
for r in nimmanager.terrestrialsList:
if default is None and r[0] == defaultRegionForNIM:
default = r[0]
list.append((r[0], r[0][:46]))
return ConfigSelection(choices = list, default = default)
def predefinedCabTranspondersList(self):
default = None
list = []
# 0 transponder type, 1 freq, 2 sym, 3 mod, 4 fec, 5 inv, 6 sys
compare = [1, self.scan_cab.frequency.value*1000, self.scan_cab.symbolrate.value*1000, self.scan_cab.modulation.value, self.scan_cab.fec.value, self.scan_cab.inversion.value, self.scan_cab.system.value]
i = 0
index_to_scan = int(self.scan_nims.value)
tps = nimmanager.getTranspondersCable(index_to_scan)
for tp in tps:
if tp[0] == 1: #CABLE
if default is None and self.compareCabTransponders(tp, compare):
default = str(i)
list.append((str(i), self.humanReadableCabTransponder(tp)))
i += 1
self.CableTransponders = ConfigSelection(choices = list, default = default)
return default
def humanReadableCabTransponder(self, tp):
if tp[3] in range (6) and (tp[4] in range (10) or tp[4] == 15):
mod_list = ['Auto', '16-QAM','32-QAM','64-QAM','128-QAM', '256-QAM']
fec_list = {0:"Auto", 1:'1/2', 2:'2/3', 3:'3/4', 4:'5/6', 5:'7/8', 6:'8/9', 7:'3/5', 8:'4/5', 9:'9/10', 15:'None'}
print str(tp[1]/1000) + " MHz " + fec_list[tp[4]] + " " + str(tp[2]/1000) + " " + mod_list[tp[3]]
return str(tp[1]/1000) + " MHz " + fec_list[tp[4]] + " " + str(tp[2]/1000) + " " + mod_list[tp[3]]
return _("Invalid transponder data")
def compareCabTransponders(self, tp, compare):
frequencyTolerance = 1000000 #1 MHz
symbolRateTolerance = 10
return abs(tp[1] - compare[1]) <= frequencyTolerance and abs(tp[2] - compare[2]) <= symbolRateTolerance and tp[3] == compare[3] and (not tp[4] or tp[4] == compare[4])
def startScan(self, tlist, flags, feid, networkid = 0):
if len(tlist):
# flags |= eComponentScan.scanSearchBAT
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])
else:
self.session.openWithCallback(self.startScanCallback, ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])
else:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
def startScanCallback(self, answer):
if answer:
self.doCloseRecursive()
def keyCancel(self):
self.session.nav.playService(self.session.postScanService)
for x in self["config"].list:
x[1].cancel()
self.close()
def doCloseRecursive(self):
self.session.nav.playService(self.session.postScanService)
self.closeRecursive()
class ScanSimple(ConfigListScreen, Screen, CableTransponderSearchSupport):
def getNetworksForNim(self, nim):
if nim.isCompatible("DVB-S"):
networks = nimmanager.getSatListForNim(nim.slot)
elif not nim.empty:
			networks = [ nim.type ] # "DVB-C" or "DVB-T". TODO: separate networks for different C/T tuners, if we want to support that.
else:
# empty tuners provide no networks.
networks = [ ]
return networks
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Automatic Scan"))
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Scan"))
self["actions"] = ActionMap(["SetupActions", "MenuActions", "ColorActions"],
{
"ok": self.keyGo,
"save": self.keyGo,
"cancel": self.keyCancel,
"menu": self.doCloseRecursive,
"red": self.keyCancel,
"green": self.keyGo,
}, -2)
self.session.postScanService = session.nav.getCurrentlyPlayingServiceOrGroup()
self.list = []
tlist = []
known_networks = [ ]
nims_to_scan = [ ]
self.finished_cb = None
for nim in nimmanager.nim_slots:
# collect networks provided by this tuner
need_scan = False
networks = self.getNetworksForNim(nim)
print "nim %d provides" % nim.slot, networks
print "known:", known_networks
# we only need to scan on the first tuner which provides a network.
# this gives the first tuner for each network priority for scanning.
for x in networks:
if x not in known_networks:
need_scan = True
print x, "not in ", known_networks
known_networks.append(x)
# don't offer to scan nims if nothing is connected
if not nimmanager.somethingConnected(nim.slot):
need_scan = False
if need_scan:
nims_to_scan.append(nim)
# we save the config elements to use them on keyGo
self.nim_enable = [ ]
if len(nims_to_scan):
self.scan_networkScan = ConfigYesNo(default = True)
self.scan_clearallservices = ConfigSelection(default = "yes", choices = [("no", _("no")), ("yes", _("yes")), ("yes_hold_feeds", _("yes (keep feeds)"))])
self.list.append(getConfigListEntry(_("Network scan"), self.scan_networkScan))
self.list.append(getConfigListEntry(_("Clear before scan"), self.scan_clearallservices))
for nim in nims_to_scan:
nimconfig = ConfigYesNo(default = True)
nimconfig.nim_index = nim.slot
self.nim_enable.append(nimconfig)
self.list.append(getConfigListEntry(_("Scan ") + nim.slot_name + " (" + nim.friendly_type + ")", nimconfig))
ConfigListScreen.__init__(self, self.list)
self["header"] = Label(_("Automatic scan"))
self["footer"] = Label(_("Press OK to scan"))
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.keyGo()
def keyGo(self):
InfoBarInstance = InfoBar.instance
if InfoBarInstance:
InfoBarInstance.checkTimeshiftRunning(self.keyGoCheckTimeshiftCallback)
else:
self.keyGoCheckTimeshiftCallback(True)
def keyGoCheckTimeshiftCallback(self, answer):
if answer:
self.scanList = []
self.known_networks = set()
self.nim_iter=0
self.buildTransponderList()
def buildTransponderList(self): # called repeatedly because the cable transponder search is asynchronous
APPEND_NOW = 0
SEARCH_CABLE_TRANSPONDERS = 1
action = APPEND_NOW
n = self.nim_enable[self.nim_iter] if self.nim_iter < len(self.nim_enable) else None
self.nim_iter += 1
if n:
if n.value: # check if nim is enabled
flags = 0
nim = nimmanager.nim_slots[n.nim_index]
networks = set(self.getNetworksForNim(nim))
networkid = 0
# don't scan the same network twice
networks.difference_update(self.known_networks)
self.known_networks.update(networks)
tlist = [ ]
if nim.isCompatible("DVB-S"):
# get initial transponders for each satellite to be scanned
for sat in networks:
getInitialTransponderList(tlist, sat[0])
elif nim.isCompatible("DVB-C"):
if config.Nims[nim.slot].cable.scan_type.value == "provider":
getInitialCableTransponderList(tlist, nim.slot)
else:
action = SEARCH_CABLE_TRANSPONDERS
networkid = config.Nims[nim.slot].cable.scan_networkid.value
elif nim.isCompatible("DVB-T"):
getInitialTerrestrialTransponderList(tlist, nimmanager.getTerrestrialDescription(nim.slot))
else:
assert False
flags = self.scan_networkScan.value and eComponentScan.scanNetworkSearch or 0
tmp = self.scan_clearallservices.value
if tmp == "yes":
flags |= eComponentScan.scanRemoveServices
elif tmp == "yes_hold_feeds":
flags |= eComponentScan.scanRemoveServices
flags |= eComponentScan.scanDontRemoveFeeds
if action == APPEND_NOW:
self.scanList.append({"transponders": tlist, "feid": nim.slot, "flags": flags})
elif action == SEARCH_CABLE_TRANSPONDERS:
self.flags = flags
self.feid = nim.slot
self.networkid = networkid
self.startCableTransponderSearch(nim.slot)
return
else:
assert False
self.buildTransponderList() # recursive call of this function !!!
return
# when we are here, then the recursion is finished and all enabled nims are checked
# so we now start the real transponder scan
self.startScan(self.scanList)
def startScan(self, scanList):
if len(scanList):
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, ServiceScan, scanList = scanList)
else:
self.session.open(ServiceScan, scanList = scanList)
else:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
def setCableTransponderSearchResult(self, tlist):
if tlist is not None:
self.scanList.append({"transponders": tlist, "feid": self.feid, "flags": self.flags})
def cableTransponderSearchFinished(self):
self.buildTransponderList()
def keyCancel(self):
self.session.nav.playService(self.session.postScanService)
self.close()
def doCloseRecursive(self):
self.session.nav.playService(self.session.postScanService)
self.closeRecursive()
def Satexists(self, tlist, pos):
for x in tlist:
if x == pos:
return 1
return 0
|
TribeMedia/sky_engine
|
refs/heads/master
|
sky/tools/webkitpy/common/system/zipfileset_mock.py
|
167
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def make_factory(ziphashes):
"""ZipFileSet factory routine that looks up zipfiles in a dict;
each zipfile should also be a dict of member names -> contents."""
class MockZipFileSet(object):
def __init__(self, url):
self._url = url
self._ziphash = ziphashes[url]
def namelist(self):
return self._ziphash.keys()
def read(self, member):
return self._ziphash[member]
def close(self):
pass
def maker(url):
# We return None because there's no tempfile to delete.
return (None, MockZipFileSet(url))
return maker
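# A minimal usage sketch, assuming a hypothetical URL and member name (neither
# appears in the original module); guarded so it only runs when executed directly.
if __name__ == '__main__':
    factory = make_factory({'http://example.com/build.zip': {'results.json': '{}'}})
    tempfile_path, zip_set = factory('http://example.com/build.zip')
    assert tempfile_path is None  # mocks have no tempfile to delete
    assert list(zip_set.namelist()) == ['results.json']
    assert zip_set.read('results.json') == '{}'
    zip_set.close()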
|
fadhiilrachman/line-py
|
refs/heads/master
|
setup.py
|
1
|
# -*- coding: utf-8 -*-
"""
LINE Python -- LINE Messaging's private API
===========================================
>>> from linepy import *
Links
`````
* `GitHub repository <https://github.com/fadhiilrachman/line-py>`_
"""
from __future__ import with_statement
import re, codecs
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
with open('linepy/__init__.py') as f:
version = re.search(r'__version__\s*=\s*\'(.+?)\'', f.read()).group(1)
assert version
with open('README.rst') as f:
setup(
name='linepy',
packages=['linepy'],
version=version,
license='BSD 3 Clause License',
author='Fadhiil Rachman',
author_email='fadhiilrachman@gmail.com',
url='https://github.com/fadhiilrachman/line-py',
description='LINE Messaging\'s private API',
long_description=f.read(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Communications :: Chat',
],
install_requires=[
'akad',
'requests',
'rsa',
'PyQRCode'
],
)
|
sergei-maertens/django
|
refs/heads/master
|
tests/flatpages_tests/test_templatetags.py
|
67
|
from django.contrib.auth.models import AnonymousUser, User
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from django.template import Context, Template, TemplateSyntaxError
from django.test import TestCase
class FlatpageTemplateTagTests(TestCase):
@classmethod
def setUpTestData(cls):
# don't use the manager because we want to ensure the site exists
# with pk=1, regardless of whether or not it already exists.
cls.site1 = Site(pk=1, domain='example.com', name='example.com')
cls.site1.save()
cls.fp1 = FlatPage.objects.create(
url='/flatpage/', title='A Flatpage', content="Isn't it flat!",
enable_comments=False, template_name='', registration_required=False
)
cls.fp2 = FlatPage.objects.create(
url='/location/flatpage/', title='A Nested Flatpage', content="Isn't it flat and deep!",
enable_comments=False, template_name='', registration_required=False
)
cls.fp3 = FlatPage.objects.create(
url='/sekrit/', title='Sekrit Flatpage', content="Isn't it sekrit!",
enable_comments=False, template_name='', registration_required=True
)
cls.fp4 = FlatPage.objects.create(
url='/location/sekrit/', title='Sekrit Nested Flatpage', content="Isn't it sekrit and deep!",
enable_comments=False, template_name='', registration_required=True
)
cls.fp1.sites.add(cls.site1)
cls.fp2.sites.add(cls.site1)
cls.fp3.sites.add(cls.site1)
cls.fp4.sites.add(cls.site1)
def test_get_flatpages_tag(self):
"The flatpage template tag retrieves unregistered prefixed flatpages by default"
out = Template(
"{% load flatpages %}"
"{% get_flatpages as flatpages %}"
"{% for page in flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context())
self.assertEqual(out, "A Flatpage,A Nested Flatpage,")
def test_get_flatpages_tag_for_anon_user(self):
"The flatpage template tag retrieves unregistered flatpages for an anonymous user"
out = Template(
"{% load flatpages %}"
"{% get_flatpages for anonuser as flatpages %}"
"{% for page in flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'anonuser': AnonymousUser()
}))
self.assertEqual(out, "A Flatpage,A Nested Flatpage,")
def test_get_flatpages_tag_for_user(self):
"The flatpage template tag retrieves all flatpages for an authenticated user"
me = User.objects.create_user('testuser', 'test@example.com', 's3krit')
out = Template(
"{% load flatpages %}"
"{% get_flatpages for me as flatpages %}"
"{% for page in flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'me': me
}))
self.assertEqual(out, "A Flatpage,A Nested Flatpage,Sekrit Nested Flatpage,Sekrit Flatpage,")
def test_get_flatpages_with_prefix(self):
"The flatpage template tag retrieves unregistered prefixed flatpages by default"
out = Template(
"{% load flatpages %}"
"{% get_flatpages '/location/' as location_flatpages %}"
"{% for page in location_flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context())
self.assertEqual(out, "A Nested Flatpage,")
def test_get_flatpages_with_prefix_for_anon_user(self):
"The flatpage template tag retrieves unregistered prefixed flatpages for an anonymous user"
out = Template(
"{% load flatpages %}"
"{% get_flatpages '/location/' for anonuser as location_flatpages %}"
"{% for page in location_flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'anonuser': AnonymousUser()
}))
self.assertEqual(out, "A Nested Flatpage,")
def test_get_flatpages_with_prefix_for_user(self):
"The flatpage template tag retrieve prefixed flatpages for an authenticated user"
me = User.objects.create_user('testuser', 'test@example.com', 's3krit')
out = Template(
"{% load flatpages %}"
"{% get_flatpages '/location/' for me as location_flatpages %}"
"{% for page in location_flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'me': me
}))
self.assertEqual(out, "A Nested Flatpage,Sekrit Nested Flatpage,")
def test_get_flatpages_with_variable_prefix(self):
"The prefix for the flatpage template tag can be a template variable"
out = Template(
"{% load flatpages %}"
"{% get_flatpages location_prefix as location_flatpages %}"
"{% for page in location_flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'location_prefix': '/location/'
}))
self.assertEqual(out, "A Nested Flatpage,")
def test_parsing_errors(self):
"There are various ways that the flatpages template tag won't parse"
def render(t):
return Template(t).render(Context())
with self.assertRaises(TemplateSyntaxError):
render("{% load flatpages %}{% get_flatpages %}")
with self.assertRaises(TemplateSyntaxError):
render("{% load flatpages %}{% get_flatpages as %}")
with self.assertRaises(TemplateSyntaxError):
render("{% load flatpages %}{% get_flatpages cheesecake flatpages %}")
with self.assertRaises(TemplateSyntaxError):
render("{% load flatpages %}{% get_flatpages as flatpages asdf %}")
with self.assertRaises(TemplateSyntaxError):
render("{% load flatpages %}{% get_flatpages cheesecake user as flatpages %}")
with self.assertRaises(TemplateSyntaxError):
render("{% load flatpages %}{% get_flatpages for user as flatpages asdf %}")
with self.assertRaises(TemplateSyntaxError):
render("{% load flatpages %}{% get_flatpages prefix for user as flatpages asdf %}")
|
yask123/django
|
refs/heads/master
|
tests/custom_lookups/tests.py
|
177
|
from __future__ import unicode_literals
import contextlib
import time
import unittest
from datetime import date, datetime
from django.core.exceptions import FieldError
from django.db import connection, models
from django.test import TestCase, override_settings
from django.utils import timezone
from .models import Author, MySQLUnixTimestamp
@contextlib.contextmanager
def register_lookup(field, *lookups):
try:
for lookup in lookups:
field.register_lookup(lookup)
yield
finally:
for lookup in lookups:
field._unregister_lookup(lookup)
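# A short usage sketch of the context manager above (Div3Lookup is defined just
# below; the custom filter only resolves while the registration is active):
#
#     with register_lookup(models.IntegerField, Div3Lookup):
#         Author.objects.filter(age__div3=2)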
class Div3Lookup(models.Lookup):
lookup_name = 'div3'
def as_sql(self, compiler, connection):
lhs, params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params.extend(rhs_params)
return '(%s) %%%% 3 = %s' % (lhs, rhs), params
def as_oracle(self, compiler, connection):
lhs, params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params.extend(rhs_params)
return 'mod(%s, 3) = %s' % (lhs, rhs), params
class Div3Transform(models.Transform):
lookup_name = 'div3'
def as_sql(self, compiler, connection):
lhs, lhs_params = compiler.compile(self.lhs)
return '(%s) %%%% 3' % lhs, lhs_params
def as_oracle(self, compiler, connection):
lhs, lhs_params = compiler.compile(self.lhs)
return 'mod(%s, 3)' % lhs, lhs_params
class Div3BilateralTransform(Div3Transform):
bilateral = True
class Mult3BilateralTransform(models.Transform):
bilateral = True
lookup_name = 'mult3'
def as_sql(self, compiler, connection):
lhs, lhs_params = compiler.compile(self.lhs)
return '3 * (%s)' % lhs, lhs_params
class UpperBilateralTransform(models.Transform):
bilateral = True
lookup_name = 'upper'
def as_sql(self, compiler, connection):
lhs, lhs_params = compiler.compile(self.lhs)
return 'UPPER(%s)' % lhs, lhs_params
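# With bilateral = True a transform is applied to both sides of the comparison,
# so name__upper='doe' matches 'Doe' and 'doe' alike. A sketch of the rough SQL
# (assuming the registration done in BilateralTransformTests below):
#
#     Author.objects.filter(name__upper='doe')
#     # roughly: WHERE UPPER("author"."name") = UPPER('doe')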
class YearTransform(models.Transform):
# Use a name that avoids collision with the built-in year lookup.
lookup_name = 'testyear'
def as_sql(self, compiler, connection):
lhs_sql, params = compiler.compile(self.lhs)
return connection.ops.date_extract_sql('year', lhs_sql), params
@property
def output_field(self):
return models.IntegerField()
@YearTransform.register_lookup
class YearExact(models.lookups.Lookup):
lookup_name = 'exact'
def as_sql(self, compiler, connection):
# We will need to skip the extract part, and instead go
# directly with the originating field, that is self.lhs.lhs
lhs_sql, lhs_params = self.process_lhs(compiler, connection, self.lhs.lhs)
rhs_sql, rhs_params = self.process_rhs(compiler, connection)
# Note that we must be careful so that we have params in the
# same order as we have the parts in the SQL.
params = lhs_params + rhs_params + lhs_params + rhs_params
# We use PostgreSQL specific SQL here. Note that we must do the
# conversions in SQL instead of in Python to support F() references.
return ("%(lhs)s >= (%(rhs)s || '-01-01')::date "
"AND %(lhs)s <= (%(rhs)s || '-12-31')::date" %
{'lhs': lhs_sql, 'rhs': rhs_sql}, params)
@YearTransform.register_lookup
class YearLte(models.lookups.LessThanOrEqual):
"""
The purpose of this lookup is to efficiently compare the year of the field.
"""
def as_sql(self, compiler, connection):
# Skip the YearTransform above us (no possibility for efficient
# lookup otherwise).
real_lhs = self.lhs.lhs
lhs_sql, params = self.process_lhs(compiler, connection, real_lhs)
rhs_sql, rhs_params = self.process_rhs(compiler, connection)
params.extend(rhs_params)
# Build SQL where the integer year is concatenated with last month
# and day, then convert that to date. (We try to have SQL like:
# WHERE somecol <= '2013-12-31')
# but also make it work if the rhs_sql is field reference.
return "%s <= (%s || '-12-31')::date" % (lhs_sql, rhs_sql), params
class SQLFunc(models.Lookup):
def __init__(self, name, *args, **kwargs):
super(SQLFunc, self).__init__(*args, **kwargs)
self.name = name
def as_sql(self, compiler, connection):
return '%s()', [self.name]
@property
def output_field(self):
return CustomField()
class SQLFuncFactory(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
return SQLFunc(self.name, *args, **kwargs)
class CustomField(models.TextField):
def get_lookup(self, lookup_name):
if lookup_name.startswith('lookupfunc_'):
key, name = lookup_name.split('_', 1)
return SQLFuncFactory(name)
return super(CustomField, self).get_lookup(lookup_name)
def get_transform(self, lookup_name):
if lookup_name.startswith('transformfunc_'):
key, name = lookup_name.split('_', 1)
return SQLFuncFactory(name)
return super(CustomField, self).get_transform(lookup_name)
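# Dispatch sketch for the overrides above: a filter such as
# field__lookupfunc_monkeys=3 reaches get_lookup('lookupfunc_monkeys'), which
# strips the prefix and returns SQLFuncFactory('monkeys'), so the generated SQL
# contains monkeys() (exercised in CustomisedMethodsTests below).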
class CustomModel(models.Model):
field = CustomField()
# We will register this class temporarily in the test method.
class InMonth(models.lookups.Lookup):
"""
InMonth matches if the column's month is the same as value's month.
"""
lookup_name = 'inmonth'
def as_sql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
# We need to be careful so that we get the params in right
# places.
params = lhs_params + rhs_params + lhs_params + rhs_params
return ("%s >= date_trunc('month', %s) and "
"%s < date_trunc('month', %s) + interval '1 months'" %
(lhs, rhs, lhs, rhs), params)
class DateTimeTransform(models.Transform):
lookup_name = 'as_datetime'
@property
def output_field(self):
return models.DateTimeField()
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return 'from_unixtime({})'.format(lhs), params
class LookupTests(TestCase):
def test_basic_lookup(self):
a1 = Author.objects.create(name='a1', age=1)
a2 = Author.objects.create(name='a2', age=2)
a3 = Author.objects.create(name='a3', age=3)
a4 = Author.objects.create(name='a4', age=4)
with register_lookup(models.IntegerField, Div3Lookup):
self.assertQuerysetEqual(
Author.objects.filter(age__div3=0),
[a3], lambda x: x
)
self.assertQuerysetEqual(
Author.objects.filter(age__div3=1).order_by('age'),
[a1, a4], lambda x: x
)
self.assertQuerysetEqual(
Author.objects.filter(age__div3=2),
[a2], lambda x: x
)
self.assertQuerysetEqual(
Author.objects.filter(age__div3=3),
[], lambda x: x
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific SQL used")
def test_birthdate_month(self):
a1 = Author.objects.create(name='a1', birthdate=date(1981, 2, 16))
a2 = Author.objects.create(name='a2', birthdate=date(2012, 2, 29))
a3 = Author.objects.create(name='a3', birthdate=date(2012, 1, 31))
a4 = Author.objects.create(name='a4', birthdate=date(2012, 3, 1))
with register_lookup(models.DateField, InMonth):
self.assertQuerysetEqual(
Author.objects.filter(birthdate__inmonth=date(2012, 1, 15)),
[a3], lambda x: x
)
self.assertQuerysetEqual(
Author.objects.filter(birthdate__inmonth=date(2012, 2, 1)),
[a2], lambda x: x
)
self.assertQuerysetEqual(
Author.objects.filter(birthdate__inmonth=date(1981, 2, 28)),
[a1], lambda x: x
)
self.assertQuerysetEqual(
Author.objects.filter(birthdate__inmonth=date(2012, 3, 12)),
[a4], lambda x: x
)
self.assertQuerysetEqual(
Author.objects.filter(birthdate__inmonth=date(2012, 4, 1)),
[], lambda x: x
)
def test_div3_extract(self):
with register_lookup(models.IntegerField, Div3Transform):
a1 = Author.objects.create(name='a1', age=1)
a2 = Author.objects.create(name='a2', age=2)
a3 = Author.objects.create(name='a3', age=3)
a4 = Author.objects.create(name='a4', age=4)
baseqs = Author.objects.order_by('name')
self.assertQuerysetEqual(
baseqs.filter(age__div3=2),
[a2], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__lte=3),
[a1, a2, a3, a4], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__in=[0, 2]),
[a2, a3], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__in=[2, 4]),
[a2], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__gte=3),
[], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__range=(1, 2)),
[a1, a2, a4], lambda x: x)
class BilateralTransformTests(TestCase):
def test_bilateral_upper(self):
with register_lookup(models.CharField, UpperBilateralTransform):
Author.objects.bulk_create([
Author(name='Doe'),
Author(name='doe'),
Author(name='Foo'),
])
self.assertQuerysetEqual(
Author.objects.filter(name__upper='doe'),
["<Author: Doe>", "<Author: doe>"], ordered=False)
self.assertQuerysetEqual(
Author.objects.filter(name__upper__contains='f'),
["<Author: Foo>"], ordered=False)
def test_bilateral_inner_qs(self):
with register_lookup(models.CharField, UpperBilateralTransform):
with self.assertRaises(NotImplementedError):
Author.objects.filter(name__upper__in=Author.objects.values_list('name'))
def test_div3_bilateral_extract(self):
with register_lookup(models.IntegerField, Div3BilateralTransform):
a1 = Author.objects.create(name='a1', age=1)
a2 = Author.objects.create(name='a2', age=2)
a3 = Author.objects.create(name='a3', age=3)
a4 = Author.objects.create(name='a4', age=4)
baseqs = Author.objects.order_by('name')
self.assertQuerysetEqual(
baseqs.filter(age__div3=2),
[a2], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__lte=3),
[a3], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__in=[0, 2]),
[a2, a3], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__in=[2, 4]),
[a1, a2, a4], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__gte=3),
[a1, a2, a3, a4], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__range=(1, 2)),
[a1, a2, a4], lambda x: x)
def test_bilateral_order(self):
with register_lookup(models.IntegerField, Mult3BilateralTransform, Div3BilateralTransform):
a1 = Author.objects.create(name='a1', age=1)
a2 = Author.objects.create(name='a2', age=2)
a3 = Author.objects.create(name='a3', age=3)
a4 = Author.objects.create(name='a4', age=4)
baseqs = Author.objects.order_by('name')
self.assertQuerysetEqual(
baseqs.filter(age__mult3__div3=42),
# mult3__div3 always leads to 0
[a1, a2, a3, a4], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(age__div3__mult3=42),
[a3], lambda x: x)
def test_bilateral_fexpr(self):
with register_lookup(models.IntegerField, Mult3BilateralTransform):
a1 = Author.objects.create(name='a1', age=1, average_rating=3.2)
a2 = Author.objects.create(name='a2', age=2, average_rating=0.5)
a3 = Author.objects.create(name='a3', age=3, average_rating=1.5)
a4 = Author.objects.create(name='a4', age=4)
baseqs = Author.objects.order_by('name')
self.assertQuerysetEqual(
baseqs.filter(age__mult3=models.F('age')),
[a1, a2, a3, a4], lambda x: x)
self.assertQuerysetEqual(
# Same as age >= average_rating
baseqs.filter(age__mult3__gte=models.F('average_rating')),
[a2, a3], lambda x: x)
@override_settings(USE_TZ=True)
class DateTimeLookupTests(TestCase):
@unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific SQL used")
def test_datetime_output_field(self):
with register_lookup(models.PositiveIntegerField, DateTimeTransform):
ut = MySQLUnixTimestamp.objects.create(timestamp=time.time())
y2k = timezone.make_aware(datetime(2000, 1, 1))
self.assertQuerysetEqual(
MySQLUnixTimestamp.objects.filter(timestamp__as_datetime__gt=y2k),
[ut], lambda x: x)
class YearLteTests(TestCase):
def setUp(self):
models.DateField.register_lookup(YearTransform)
self.a1 = Author.objects.create(name='a1', birthdate=date(1981, 2, 16))
self.a2 = Author.objects.create(name='a2', birthdate=date(2012, 2, 29))
self.a3 = Author.objects.create(name='a3', birthdate=date(2012, 1, 31))
self.a4 = Author.objects.create(name='a4', birthdate=date(2012, 3, 1))
def tearDown(self):
models.DateField._unregister_lookup(YearTransform)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific SQL used")
def test_year_lte(self):
baseqs = Author.objects.order_by('name')
self.assertQuerysetEqual(
baseqs.filter(birthdate__testyear__lte=2012),
[self.a1, self.a2, self.a3, self.a4], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(birthdate__testyear=2012),
[self.a2, self.a3, self.a4], lambda x: x)
self.assertNotIn('BETWEEN', str(baseqs.filter(birthdate__testyear=2012).query))
self.assertQuerysetEqual(
baseqs.filter(birthdate__testyear__lte=2011),
[self.a1], lambda x: x)
# The non-optimized version works, too.
self.assertQuerysetEqual(
baseqs.filter(birthdate__testyear__lt=2012),
[self.a1], lambda x: x)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific SQL used")
def test_year_lte_fexpr(self):
self.a2.age = 2011
self.a2.save()
self.a3.age = 2012
self.a3.save()
self.a4.age = 2013
self.a4.save()
baseqs = Author.objects.order_by('name')
self.assertQuerysetEqual(
baseqs.filter(birthdate__testyear__lte=models.F('age')),
[self.a3, self.a4], lambda x: x)
self.assertQuerysetEqual(
baseqs.filter(birthdate__testyear__lt=models.F('age')),
[self.a4], lambda x: x)
def test_year_lte_sql(self):
# This test will just check the generated SQL for __lte. This
# doesn't require running on PostgreSQL and spots the most likely
# error - not running YearLte SQL at all.
baseqs = Author.objects.order_by('name')
self.assertIn(
'<= (2011 || ', str(baseqs.filter(birthdate__testyear__lte=2011).query))
self.assertIn(
'-12-31', str(baseqs.filter(birthdate__testyear__lte=2011).query))
def test_postgres_year_exact(self):
baseqs = Author.objects.order_by('name')
self.assertIn(
'= (2011 || ', str(baseqs.filter(birthdate__testyear=2011).query))
self.assertIn(
'-12-31', str(baseqs.filter(birthdate__testyear=2011).query))
def test_custom_implementation_year_exact(self):
try:
# Two ways to add a customized implementation for different backends:
# First is MonkeyPatch of the class.
def as_custom_sql(self, compiler, connection):
lhs_sql, lhs_params = self.process_lhs(compiler, connection, self.lhs.lhs)
rhs_sql, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params + lhs_params + rhs_params
return ("%(lhs)s >= str_to_date(concat(%(rhs)s, '-01-01'), '%%%%Y-%%%%m-%%%%d') "
"AND %(lhs)s <= str_to_date(concat(%(rhs)s, '-12-31'), '%%%%Y-%%%%m-%%%%d')" %
{'lhs': lhs_sql, 'rhs': rhs_sql}, params)
setattr(YearExact, 'as_' + connection.vendor, as_custom_sql)
self.assertIn(
'concat(',
str(Author.objects.filter(birthdate__testyear=2012).query))
finally:
delattr(YearExact, 'as_' + connection.vendor)
try:
# The other way is to subclass the original lookup and register the subclassed
# lookup instead of the original.
class CustomYearExact(YearExact):
# This method should be named "as_mysql" for MySQL, "as_postgresql" for postgres
# and so on, but as we don't know which DB we are running on, we need to use
# setattr.
def as_custom_sql(self, compiler, connection):
lhs_sql, lhs_params = self.process_lhs(compiler, connection, self.lhs.lhs)
rhs_sql, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params + lhs_params + rhs_params
return ("%(lhs)s >= str_to_date(CONCAT(%(rhs)s, '-01-01'), '%%%%Y-%%%%m-%%%%d') "
"AND %(lhs)s <= str_to_date(CONCAT(%(rhs)s, '-12-31'), '%%%%Y-%%%%m-%%%%d')" %
{'lhs': lhs_sql, 'rhs': rhs_sql}, params)
setattr(CustomYearExact, 'as_' + connection.vendor, CustomYearExact.as_custom_sql)
YearTransform.register_lookup(CustomYearExact)
self.assertIn(
'CONCAT(',
str(Author.objects.filter(birthdate__testyear=2012).query))
finally:
YearTransform._unregister_lookup(CustomYearExact)
YearTransform.register_lookup(YearExact)
class TrackCallsYearTransform(YearTransform):
# Use a name that avoids collision with the built-in year lookup.
lookup_name = 'testyear'
call_order = []
def as_sql(self, compiler, connection):
lhs_sql, params = compiler.compile(self.lhs)
return connection.ops.date_extract_sql('year', lhs_sql), params
@property
def output_field(self):
return models.IntegerField()
def get_lookup(self, lookup_name):
self.call_order.append('lookup')
return super(TrackCallsYearTransform, self).get_lookup(lookup_name)
def get_transform(self, lookup_name):
self.call_order.append('transform')
return super(TrackCallsYearTransform, self).get_transform(lookup_name)
class LookupTransformCallOrderTests(TestCase):
def test_call_order(self):
with register_lookup(models.DateField, TrackCallsYearTransform):
# junk lookup - tries lookup, then transform, then fails
with self.assertRaises(FieldError):
Author.objects.filter(birthdate__testyear__junk=2012)
self.assertEqual(TrackCallsYearTransform.call_order,
['lookup', 'transform'])
TrackCallsYearTransform.call_order = []
# junk transform - tries transform only, then fails
with self.assertRaises(FieldError):
Author.objects.filter(birthdate__testyear__junk__more_junk=2012)
self.assertEqual(TrackCallsYearTransform.call_order,
['transform'])
TrackCallsYearTransform.call_order = []
# Just getting the year (implied __exact) - lookup only
Author.objects.filter(birthdate__testyear=2012)
self.assertEqual(TrackCallsYearTransform.call_order,
['lookup'])
TrackCallsYearTransform.call_order = []
# Just getting the year (explicit __exact) - lookup only
Author.objects.filter(birthdate__testyear__exact=2012)
self.assertEqual(TrackCallsYearTransform.call_order,
['lookup'])
class CustomisedMethodsTests(TestCase):
def test_overridden_get_lookup(self):
q = CustomModel.objects.filter(field__lookupfunc_monkeys=3)
self.assertIn('monkeys()', str(q.query))
def test_overridden_get_transform(self):
q = CustomModel.objects.filter(field__transformfunc_banana=3)
self.assertIn('banana()', str(q.query))
def test_overridden_get_lookup_chain(self):
q = CustomModel.objects.filter(field__transformfunc_banana__lookupfunc_elephants=3)
self.assertIn('elephants()', str(q.query))
def test_overridden_get_transform_chain(self):
q = CustomModel.objects.filter(field__transformfunc_banana__transformfunc_pear=3)
self.assertIn('pear()', str(q.query))
class SubqueryTransformTests(TestCase):
def test_subquery_usage(self):
with register_lookup(models.IntegerField, Div3Transform):
Author.objects.create(name='a1', age=1)
a2 = Author.objects.create(name='a2', age=2)
Author.objects.create(name='a3', age=3)
Author.objects.create(name='a4', age=4)
self.assertQuerysetEqual(
Author.objects.order_by('name').filter(id__in=Author.objects.filter(age__div3=2)),
[a2], lambda x: x)
|
maxamillion/product-definition-center
|
refs/heads/master
|
pdc/apps/repository/migrations/0002_auto_20150512_0724.py
|
8
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import unicode_literals
from django.db import models, migrations
def create(apps, model_name, data):
model = apps.get_model('repository', model_name)
for item in data:
model.objects.create(**item)
def create_service(apps, schema_editor):
create(apps, 'Service',
[
{
'description': 'Red Hat Network',
'name': 'rhn'
},
{
'description': 'Pulp (CDN)',
'name': 'pulp'
},
{
'description': 'ftp://ftp.redhat.com',
'name': 'ftp'
}
])
def create_content_category(apps, schema_editor):
create(apps, 'ContentCategory',
[
{
'description': 'Binary',
'name': 'binary'
},
{
'description': 'Debug',
'name': 'debug'
},
{
'description': 'Source',
'name': 'source'
}
])
def create_content_format(apps, schema_editor):
create(apps, 'ContentFormat',
[
{
'description': 'RPM packages',
'name': 'rpm'
},
{
'description': 'ISO images',
'name': 'iso'
},
{
'description': 'Installable kickstart trees',
'name': 'kickstart'
},
{
'description': 'Comps XML with package group definitions',
'name': 'comps'
},
{
'description': 'Docker image content format',
'name': 'docker'
},
{
'description': 'Driver Update Disk',
'name': 'dud'
}
])
def create_repo_family(apps, schema_editor):
create(apps, 'RepoFamily',
[
{
'description': 'Production repositories',
'name': 'dist'
},
{
'description': 'Beta (pre-production) repositories',
'name': 'beta'
},
{
'description': 'Repositories for High Touch Beta (HTB) customers',
'name': 'htb'
}
])
class Migration(migrations.Migration):
dependencies = [
('repository', '0001_initial'),
]
operations = [
migrations.RunPython(create_service),
migrations.RunPython(create_content_category),
migrations.RunPython(create_content_format),
migrations.RunPython(create_repo_family),
]
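# Note: these RunPython operations define no reverse step, so the migration
# cannot be unapplied. A hedged sketch of one possible reverse (the
# delete_service helper is hypothetical, not part of this migration):
#
# def delete_service(apps, schema_editor):
#     apps.get_model('repository', 'Service').objects.filter(
#         name__in=['rhn', 'pulp', 'ftp']).delete()
#
# migrations.RunPython(create_service, reverse_code=delete_service)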
|
fatiherikli/dbpatterns
|
refs/heads/master
|
web/dbpatterns/notifications/views.py
|
1
|
import json
from itertools import imap
from pymongo import DESCENDING
from django.http import HttpResponse
from django.views.generic import ListView
from notifications.models import Notification
class NotificationListView(ListView):
template_name = "notifications/list.html"
ajax_template_name = "notifications/notifications.html"
context_object_name = "notifications"
def get_queryset(self):
notifications = self.get_notifications()
return imap(Notification, notifications)
def get_notifications(self):
notifications = Notification.objects.filter_by_user_id(
self.request.user.id)
if self.request.is_ajax():
return notifications.limit(5).sort([
("is_read", DESCENDING),
("date_created", DESCENDING)])
return notifications.sort([("date_created", DESCENDING)])
def get_template_names(self):
if self.request.is_ajax():
return [self.ajax_template_name]
return [self.template_name]
def put(self, request, **kwargs):
Notification.objects.mark_as_read(user_id=request.user.pk)
return HttpResponse(json.dumps({
"success": True
}))
|
sublime1809/django
|
refs/heads/master
|
django/contrib/auth/apps.py
|
79
|
from django.apps import AppConfig
from django.core import checks
from django.contrib.auth.checks import check_user_model
from django.utils.translation import ugettext_lazy as _
class AuthConfig(AppConfig):
name = 'django.contrib.auth'
verbose_name = _("Authentication and Authorization")
def ready(self):
checks.register(checks.Tags.models)(check_user_model)
|
SamuelMarks/kubernetes
|
refs/heads/master
|
cluster/juju/charms/trusty/kubernetes/hooks/lib/registrator.py
|
97
|
#!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import httplib
import json
import time
class Registrator:
def __init__(self):
self.ds = {
"creationTimestamp": "",
"kind": "Minion",
"name": "", # private_address
"metadata": {
"name": "", #private_address,
},
"spec": {
"externalID": "", #private_address
"capacity": {
"mem": "", # mem + ' K',
"cpu": "", # cpus
}
},
"status": {
"conditions": [],
"hostIP": "", #private_address
}
}
@property
def data(self):
''' Returns the data structure that callers populate to make a request. '''
return self.ds
def register(self, hostname, port, api_path):
''' Contact the API Server for a new registration '''
headers = {"Content-type": "application/json",
"Accept": "application/json"}
connection = httplib.HTTPConnection(hostname, port)
print('CONN {}'.format(connection))
connection.request("POST", api_path, json.dumps(self.data), headers)
response = connection.getresponse()
body = response.read()
print(body)
result = json.loads(body)
print("Response status:%s reason:%s body:%s" % \
(response.status, response.reason, result))
return response, result
def update(self):
''' Contact the API Server to update a registration '''
# do a get on the API for the node
# repost to the API with any modified data
pass
def save(self):
''' Marshall the registration data '''
# TODO
pass
def command_succeeded(self, response, result):
''' Evaluate response data to determine if the command was successful '''
if response.status in [200, 201]:
print("Registered")
return True
elif response.status in [409,]:
print("Status Conflict")
# Suggestion: retry with a PUT instead of a POST on this response
# code; this implies using the update() method.
# TODO
elif response.status in (500,) and result.get(
'message', '').startswith('The requested resource does not exist'):
# There's something fishy in the kube api here (0.4 dev), first time we
# go to register a new minion, we always seem to get this error.
# https://github.com/GoogleCloudPlatform/kubernetes/issues/1995
time.sleep(1)
print("Retrying registration...")
raise ValueError("Registration returned 500, retry")
# return register_machine(apiserver, retry=True)
else:
print("Registration error")
# TODO - get request data
raise RuntimeError("Unable to register machine with")
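# A minimal usage sketch (the address, port and API path below are assumptions
# for illustration, not values used by the charm hooks; running it requires a
# reachable API server):
if __name__ == '__main__':
    registrator = Registrator()
    registrator.data['name'] = '10.0.0.5'
    registrator.data['metadata']['name'] = '10.0.0.5'
    response, result = registrator.register('127.0.0.1', 8080,
                                            '/api/v1beta1/minions')
    print(registrator.command_succeeded(response, result))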
|
ep1cman/workload-automation
|
refs/heads/master
|
wlauto/external/daq_server/src/daqpower/common.py
|
9
|
# Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=E1101
import json
class Serializer(json.JSONEncoder):
def default(self, o): # pylint: disable=E0202
if isinstance(o, Serializable):
return o.serialize()
if isinstance(o, EnumEntry):
return o.name
return json.JSONEncoder.default(self, o)
class Serializable(object):
@classmethod
def deserialize(cls, text):
return cls(**json.loads(text))
def serialize(self, d=None):
if d is None:
d = self.__dict__
return json.dumps(d, cls=Serializer)
class DaqServerRequest(Serializable):
def __init__(self, command, params=None): # pylint: disable=W0231
self.command = command
self.params = params or {}
class DaqServerResponse(Serializable):
def __init__(self, status, message=None, data=None): # pylint: disable=W0231
self.status = status
self.message = message.strip().replace('\r\n', ' ') if message else ''
self.data = data or {}
def __str__(self):
return '{} {}'.format(self.status, self.message or '')
class EnumEntry(object):
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
def __cmp__(self, other):
return cmp(self.name, str(other))
class Enum(object):
"""
Assuming MyEnum = Enum('A', 'B'),
MyEnum.A and MyEnum.B are valid values.
a = MyEnum.A
(a == MyEnum.A) == True
(a in MyEnum) == True
MyEnum('A') == MyEnum.A
str(MyEnum.A) == 'A'
"""
def __init__(self, *args):
for a in args:
setattr(self, a, EnumEntry(a))
def __call__(self, value):
if value not in self.__dict__:
raise ValueError('Not enum value: {}'.format(value))
return self.__dict__[value]
def __iter__(self):
for e in self.__dict__:
yield self.__dict__[e]
Status = Enum('OK', 'OKISH', 'ERROR')
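# A small round-trip sketch of the serialization helpers above (the command
# name and params are illustrative, not part of any protocol defined here):
if __name__ == '__main__':
    request = DaqServerRequest('configure', params={'rate': 10000})
    wire = request.serialize()
    echoed = DaqServerRequest.deserialize(wire)
    assert echoed.command == 'configure'
    response = DaqServerResponse(Status.OK, message='configured\r\nok')
    print(str(response))  # OK configured ok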
|
leiferikb/bitpop
|
refs/heads/master
|
src/chrome/common/extensions/docs/server2/reference_resolver_test.py
|
8
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from future import Future
from reference_resolver import ReferenceResolver
from test_object_store import TestObjectStore
from test_util import Server2Path
from third_party.json_schema_compiler.model import Namespace
_TEST_DATA = {
'baz': {
'namespace': 'baz',
'description': '',
'types': [
{
'id': 'baz_t1',
'type': 'any',
},
{
'id': 'baz_t2',
'type': 'any',
},
{
'id': 'baz_t3',
'type': 'any',
}
],
'functions': [
{
'name': 'baz_f1',
'type': 'function'
},
{
'name': 'baz_f2',
'type': 'function'
},
{
'name': 'baz_f3',
'type': 'function'
}
],
'events': [
{
'name': 'baz_e1',
'type': 'function'
},
{
'name': 'baz_e2',
'type': 'function'
},
{
'name': 'baz_e3',
'type': 'function'
}
],
'properties': {
'baz_p1': {'type': 'any'},
'baz_p2': {'type': 'any'},
'baz_p3': {'type': 'any'}
}
},
'bar.bon': {
'namespace': 'bar.bon',
'description': '',
'types': [
{
'id': 'bar_bon_t1',
'type': 'any',
},
{
'id': 'bar_bon_t2',
'type': 'any',
},
{
'id': 'bar_bon_t3',
'type': 'any',
}
],
'functions': [
{
'name': 'bar_bon_f1',
'type': 'function'
},
{
'name': 'bar_bon_f2',
'type': 'function'
},
{
'name': 'bar_bon_f3',
'type': 'function'
}
],
'events': [
{
'name': 'bar_bon_e1',
'type': 'function'
},
{
'name': 'bar_bon_e2',
'type': 'function'
},
{
'name': 'bar_bon_e3',
'type': 'function'
}
],
'properties': {
'bar_bon_p1': {'type': 'any'},
'bar_bon_p2': {'type': 'any'},
'bar_bon_p3': {'type': 'any'}
}
},
'bar': {
'namespace': 'bar',
'description': '',
'types': [
{
'id': 'bar_t1',
'type': 'any',
'properties': {
'bar_t1_p1': {
'type': 'any'
}
}
},
{
'id': 'bar_t2',
'type': 'any',
'properties': {
'bar_t2_p1': {
'type': 'any'
}
}
},
{
'id': 'bar_t3',
'type': 'any',
},
{
'id': 'bon',
'type': 'any'
}
],
'functions': [
{
'name': 'bar_f1',
'type': 'function'
},
{
'name': 'bar_f2',
'type': 'function'
},
{
'name': 'bar_f3',
'type': 'function'
}
],
'events': [
{
'name': 'bar_e1',
'type': 'function'
},
{
'name': 'bar_e2',
'type': 'function'
},
{
'name': 'bar_e3',
'type': 'function'
}
],
'properties': {
'bar_p1': {'type': 'any'},
'bar_p2': {'type': 'any'},
'bar_p3': {'$ref': 'bar_t1'}
}
},
'foo': {
'namespace': 'foo',
'description': '',
'types': [
{
'id': 'foo_t1',
'type': 'any',
},
{
'id': 'foo_t2',
'type': 'any',
},
{
'id': 'foo_t3',
'type': 'any',
'events': [
{
'name': 'foo_t3_e1',
'type': 'function'
}
]
}
],
'functions': [
{
'name': 'foo_f1',
'type': 'function'
},
{
'name': 'foo_f2',
'type': 'function'
},
{
'name': 'foo_f3',
'type': 'function'
}
],
'events': [
{
'name': 'foo_e1',
'type': 'function'
},
{
'name': 'foo_e2',
'type': 'function'
},
{
'name': 'foo_e3',
'type': 'function'
}
],
'properties': {
'foo_p1': {'$ref': 'foo_t3'},
'foo_p2': {'type': 'any'},
'foo_p3': {'type': 'any'}
}
}
}
class _FakeAPIModels(object):
def __init__(self, apis):
self._apis = apis
def GetNames(self):
return self._apis.keys()
def GetModel(self, name):
return Future(value=Namespace(self._apis[name], 'fake/path.json'))
class ReferenceResolverTest(unittest.TestCase):
def setUp(self):
self._base_path = Server2Path('test_data', 'test_json')
def _ReadLocalFile(self, filename):
with open(os.path.join(self._base_path, filename), 'r') as f:
return f.read()
def testGetLink(self):
resolver = ReferenceResolver(_FakeAPIModels(_TEST_DATA),
TestObjectStore('test'))
self.assertEqual({
'href': 'foo',
'text': 'foo',
'name': 'foo'
}, resolver.GetLink('foo', namespace='baz'))
self.assertEqual({
'href': 'foo#type-foo_t1',
'text': 'foo.foo_t1',
'name': 'foo_t1'
}, resolver.GetLink('foo.foo_t1', namespace='baz'))
self.assertEqual({
'href': 'baz#event-baz_e1',
'text': 'baz_e1',
'name': 'baz_e1'
}, resolver.GetLink('baz.baz_e1', namespace='baz'))
self.assertEqual({
'href': 'baz#event-baz_e1',
'text': 'baz_e1',
'name': 'baz_e1'
}, resolver.GetLink('baz_e1', namespace='baz'))
self.assertEqual({
'href': 'foo#method-foo_f1',
'text': 'foo.foo_f1',
'name': 'foo_f1'
}, resolver.GetLink('foo.foo_f1', namespace='baz'))
self.assertEqual({
'href': 'foo#property-foo_p3',
'text': 'foo.foo_p3',
'name': 'foo_p3'
}, resolver.GetLink('foo.foo_p3', namespace='baz'))
self.assertEqual({
'href': 'bar.bon#type-bar_bon_t3',
'text': 'bar.bon.bar_bon_t3',
'name': 'bar_bon_t3'
}, resolver.GetLink('bar.bon.bar_bon_t3', namespace='baz'))
self.assertEqual({
'href': 'bar.bon#property-bar_bon_p3',
'text': 'bar_bon_p3',
'name': 'bar_bon_p3'
}, resolver.GetLink('bar_bon_p3', namespace='bar.bon'))
self.assertEqual({
'href': 'bar.bon#property-bar_bon_p3',
'text': 'bar_bon_p3',
'name': 'bar_bon_p3'
}, resolver.GetLink('bar.bon.bar_bon_p3', namespace='bar.bon'))
self.assertEqual({
'href': 'bar#event-bar_e2',
'text': 'bar_e2',
'name': 'bar_e2'
}, resolver.GetLink('bar.bar_e2', namespace='bar'))
self.assertEqual({
'href': 'bar#type-bon',
'text': 'bon',
'name': 'bon'
}, resolver.GetLink('bar.bon', namespace='bar'))
self.assertEqual({
'href': 'foo#event-foo_t3-foo_t3_e1',
'text': 'foo_t3.foo_t3_e1',
'name': 'foo_t3_e1'
}, resolver.GetLink('foo_t3.foo_t3_e1', namespace='foo'))
self.assertEqual({
'href': 'foo#event-foo_t3-foo_t3_e1',
'text': 'foo_t3.foo_t3_e1',
'name': 'foo_t3_e1'
}, resolver.GetLink('foo.foo_t3.foo_t3_e1', namespace='foo'))
self.assertEqual({
'href': 'foo#event-foo_t3-foo_t3_e1',
'text': 'foo_t3.foo_t3_e1',
'name': 'foo_t3_e1'
}, resolver.GetLink('foo.foo_p1.foo_t3_e1', namespace='foo'))
self.assertEqual({
'href': 'bar#property-bar_t1-bar_t1_p1',
'text': 'bar.bar_t1.bar_t1_p1',
'name': 'bar_t1_p1'
}, resolver.GetLink('bar.bar_p3.bar_t1_p1', namespace='foo'))
self.assertEqual({
'href': 'bar#property-bar_t1-bar_t1_p1',
'text': 'bar_t1.bar_t1_p1',
'name': 'bar_t1_p1'
}, resolver.GetLink('bar_p3.bar_t1_p1', namespace='bar'))
self.assertEqual(
None,
resolver.GetLink('bar.bar_p3.bar_t2_p1', namespace='bar'))
self.assertEqual(
None,
resolver.GetLink('bar.bon.bar_e3', namespace='bar'))
self.assertEqual(
None,
resolver.GetLink('bar_p3', namespace='baz.bon'))
self.assertEqual(
None,
resolver.GetLink('falafel.faf', namespace='a'))
self.assertEqual(
None,
resolver.GetLink('bar_p3', namespace='foo'))
if __name__ == '__main__':
unittest.main()
|
yencarnacion/jaikuengine
|
refs/heads/master
|
.google_appengine/lib/django-1.4/django/contrib/messages/storage/cookie.py
|
83
|
from django.conf import settings
from django.contrib.messages.storage.base import BaseStorage, Message
from django.http import SimpleCookie
from django.utils import simplejson as json
from django.utils.crypto import salted_hmac, constant_time_compare
class MessageEncoder(json.JSONEncoder):
"""
Compactly serializes instances of the ``Message`` class as JSON.
"""
message_key = '__json_message'
def default(self, obj):
if isinstance(obj, Message):
message = [self.message_key, obj.level, obj.message]
if obj.extra_tags:
message.append(obj.extra_tags)
return message
return super(MessageEncoder, self).default(obj)
class MessageDecoder(json.JSONDecoder):
"""
Decodes JSON that includes serialized ``Message`` instances.
"""
def process_messages(self, obj):
if isinstance(obj, list) and obj:
if obj[0] == MessageEncoder.message_key:
return Message(*obj[1:])
return [self.process_messages(item) for item in obj]
if isinstance(obj, dict):
return dict([(key, self.process_messages(value))
for key, value in obj.iteritems()])
return obj
def decode(self, s, **kwargs):
decoded = super(MessageDecoder, self).decode(s, **kwargs)
return self.process_messages(decoded)
class CookieStorage(BaseStorage):
"""
Stores messages in a cookie.
"""
cookie_name = 'messages'
# We should be able to store 4K in a cookie, but Internet Explorer
# imposes 4K as the *total* limit for a domain. To allow other
# cookies, we go for 3/4 of 4K.
max_cookie_size = 3072
not_finished = '__messagesnotfinished__'
def _get(self, *args, **kwargs):
"""
Retrieves a list of messages from the messages cookie. If the
not_finished sentinel value is found at the end of the message list,
removes it and returns a result indicating that not all messages were
retrieved by this storage.
"""
data = self.request.COOKIES.get(self.cookie_name)
messages = self._decode(data)
all_retrieved = not (messages and messages[-1] == self.not_finished)
if messages and not all_retrieved:
# remove the sentinel value
messages.pop()
return messages, all_retrieved
def _update_cookie(self, encoded_data, response):
"""
Either sets the cookie with the encoded data if there is any data to
store, or deletes the cookie.
"""
if encoded_data:
response.set_cookie(self.cookie_name, encoded_data,
domain=settings.SESSION_COOKIE_DOMAIN)
else:
response.delete_cookie(self.cookie_name,
domain=settings.SESSION_COOKIE_DOMAIN)
def _store(self, messages, response, remove_oldest=True, *args, **kwargs):
"""
Stores the messages to a cookie, returning a list of any messages which
could not be stored.
If the encoded data is larger than ``max_cookie_size``, removes
messages until the data fits (these are the messages which are
returned), and adds the not_finished sentinel value to indicate as much.
"""
unstored_messages = []
encoded_data = self._encode(messages)
if self.max_cookie_size:
# data is going to be stored eventually by SimpleCookie, which
# adds its own overhead that we must account for.
cookie = SimpleCookie() # create outside the loop
def stored_length(val):
return len(cookie.value_encode(val)[1])
while encoded_data and stored_length(encoded_data) > self.max_cookie_size:
if remove_oldest:
unstored_messages.append(messages.pop(0))
else:
unstored_messages.insert(0, messages.pop())
encoded_data = self._encode(messages + [self.not_finished],
encode_empty=unstored_messages)
self._update_cookie(encoded_data, response)
return unstored_messages
def _hash(self, value):
"""
Creates an HMAC/SHA1 hash based on the value and the project's
SECRET_KEY setting, modified to make it unique for the present purpose.
"""
key_salt = 'django.contrib.messages'
return salted_hmac(key_salt, value).hexdigest()
def _encode(self, messages, encode_empty=False):
"""
Returns an encoded version of the messages list which can be stored as
plain text.
Since the data will be retrieved from the client-side, the encoded data
also contains a hash to ensure that the data was not tampered with.
"""
if messages or encode_empty:
encoder = MessageEncoder(separators=(',', ':'))
value = encoder.encode(messages)
return '%s$%s' % (self._hash(value), value)
def _decode(self, data):
"""
Safely decodes an encoded text stream back into a list of messages.
If the encoded text stream contained an invalid hash or was in an
invalid format, ``None`` is returned.
"""
if not data:
return None
bits = data.split('$', 1)
if len(bits) == 2:
hash, value = bits
if constant_time_compare(hash, self._hash(value)):
try:
# If we get here (and the JSON decode works), everything is
# good. In any other case, drop back and return None.
return json.loads(value, cls=MessageDecoder)
except ValueError:
pass
# Mark the data as used (so it gets removed) since something was wrong
# with the data.
self.used = True
return None
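if __name__ == '__main__':
    # A minimal round-trip sketch of the JSON layer used by this storage (no
    # cookie or SECRET_KEY involved; the '<hash>$<json>' wrapper produced by
    # _encode() sits on top of this):
    encoder = MessageEncoder(separators=(',', ':'))
    value = encoder.encode([Message(20, 'Saved.', extra_tags='success')])
    print(value)  # [["__json_message",20,"Saved.","success"]]
    decoded = json.loads(value, cls=MessageDecoder)
    print(decoded[0].message)  # Saved.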
|
kdwink/intellij-community
|
refs/heads/master
|
python/testData/quickFixes/PyRemoveArgumentQuickFixTest/duplicate_after.py
|
80
|
def foo(a, p):
pass
foo(1, p=2)
|
alexthered/kienhoc-platform
|
refs/heads/master
|
lms/djangoapps/psychometrics/models.py
|
150
|
#
# db model for psychometrics data
#
# this data is collected in real time
#
from django.db import models
from courseware.models import StudentModule
class PsychometricData(models.Model):
"""
This data is a table linking student, module, and module performance,
including number of attempts, grade, max grade, and time of checks.
Links to instances of StudentModule, but only those for capa problems.
Note that StudentModule.module_state_key is a :class:`Location` instance.
checktimes is extracted from tracking logs, or added by capa module via psychometrics callback.
"""
studentmodule = models.ForeignKey(StudentModule, db_index=True, unique=True) # contains student, module_state_key, course_id
done = models.BooleanField(default=False)
attempts = models.IntegerField(default=0) # extracted from studentmodule.state
checktimes = models.TextField(null=True, blank=True) # internally stored as list of datetime objects
# keep in mind
# grade = studentmodule.grade
# max_grade = studentmodule.max_grade
# student = studentmodule.student
# course_id = studentmodule.course_id
# location = studentmodule.module_state_key
def __unicode__(self):
sm = self.studentmodule
return "[PsychometricData] %s url=%s, grade=%s, max=%s, attempts=%s, ct=%s" % (sm.student,
sm.module_state_key,
sm.grade,
sm.max_grade,
self.attempts,
self.checktimes)
|
Acehaidrey/incubator-airflow
|
refs/heads/master
|
tests/test_utils/azure_system_helpers.py
|
2
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import random
import string
from contextlib import contextmanager
import pytest
from airflow.providers.microsoft.azure.hooks.azure_fileshare import AzureFileShareHook
from tests.test_utils.system_tests_class import SystemTest
@contextmanager
def provide_azure_fileshare(share_name: str, wasb_conn_id: str, file_name: str, directory: str):
AzureSystemTest.prepare_share(
share_name=share_name,
wasb_conn_id=wasb_conn_id,
file_name=file_name,
directory=directory,
)
yield
AzureSystemTest.delete_share(share_name=share_name, wasb_conn_id=wasb_conn_id)
@pytest.mark.system("azure")
class AzureSystemTest(SystemTest):
@classmethod
def create_share(cls, share_name: str, wasb_conn_id: str):
hook = AzureFileShareHook(wasb_conn_id=wasb_conn_id)
hook.create_share(share_name)
@classmethod
def delete_share(cls, share_name: str, wasb_conn_id: str):
hook = AzureFileShareHook(wasb_conn_id=wasb_conn_id)
hook.delete_share(share_name)
@classmethod
def create_directory(cls, share_name: str, wasb_conn_id: str, directory: str):
hook = AzureFileShareHook(wasb_conn_id=wasb_conn_id)
hook.create_directory(share_name=share_name, directory_name=directory)
@classmethod
def upload_file_from_string(
cls,
string_data: str,
share_name: str,
wasb_conn_id: str,
file_name: str,
directory: str,
):
hook = AzureFileShareHook(wasb_conn_id=wasb_conn_id)
hook.load_string(
string_data=string_data,
share_name=share_name,
directory_name=directory,
file_name=file_name,
)
@classmethod
def prepare_share(cls, share_name: str, wasb_conn_id: str, file_name: str, directory: str):
"""
Create a share with a file in the given directory. If directory is None, the file is placed in the root directory.
"""
cls.create_share(share_name=share_name, wasb_conn_id=wasb_conn_id)
cls.create_directory(share_name=share_name, wasb_conn_id=wasb_conn_id, directory=directory)
string_data = "".join(random.choice(string.ascii_letters) for _ in range(1024))
cls.upload_file_from_string(
string_data=string_data,
share_name=share_name,
wasb_conn_id=wasb_conn_id,
file_name=file_name,
directory=directory,
)
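# A usage sketch of the provide_azure_fileshare context manager above (the
# connection id and names are placeholders, not values from this module):
#
#     with provide_azure_fileshare(share_name='myshare',
#                                  wasb_conn_id='wasb_default',
#                                  file_name='data.txt',
#                                  directory='inbox'):
#         ...  # the share exists with inbox/data.txt for the duration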
|
196510921/openwrt-mtk
|
refs/heads/master
|
tools/b43-tools/files/b43-fwsquash.py
|
497
|
#!/usr/bin/env python
#
# b43 firmware file squasher
# Removes unnecessary firmware files
#
# Copyright (c) 2009 Michael Buesch <mb@bu3sch.de>
#
# Licensed under the GNU/GPL version 2 or (at your option) any later version.
#
import sys
import os
def usage():
print("Usage: %s PHYTYPES COREREVS /path/to/extracted/firmware" % sys.argv[0])
print("")
print("PHYTYPES is a comma separated list of:")
print("A => A-PHY")
print("AG => Dual A-PHY G-PHY")
print("G => G-PHY")
print("LP => LP-PHY")
print("N => N-PHY")
print("HT => HT-PHY")
print("LCN => LCN-PHY")
print("LCN40 => LCN40-PHY")
print("AC => AC-PHY")
print("")
print("COREREVS is a comma separated list of core revision numbers.")
if len(sys.argv) != 4:
usage()
sys.exit(1)
phytypes = sys.argv[1]
corerevs = sys.argv[2]
fwpath = sys.argv[3]
phytypes = phytypes.split(',')
try:
corerevs = [int(r) for r in corerevs.split(',')]
except ValueError:
print("ERROR: \"%s\" is not a valid COREREVS string\n" % corerevs)
usage()
sys.exit(1)
fwfiles = os.listdir(fwpath)
fwfiles = [fname for fname in fwfiles if fname.endswith(".fw")]
if not fwfiles:
print("ERROR: No firmware files found in %s" % fwpath)
sys.exit(1)
required_fwfiles = []
def revs_match(revs_a, revs_b):
for rev in revs_a:
if rev in revs_b:
return True
return False
def phytypes_match(types_a, types_b):
    for phytype in types_a:
        phytype = phytype.strip().upper()
        if phytype in types_b:
            return True
    return False
revmapping = {
"ucode2.fw" : ( (2,3,), ("G",), ),
"ucode4.fw" : ( (4,), ("G",), ),
"ucode5.fw" : ( (5,6,7,8,9,10,), ("G","A","AG",), ),
"ucode11.fw" : ( (11,12,), ("N",), ),
"ucode13.fw" : ( (13,), ("LP","G",), ),
"ucode14.fw" : ( (14,), ("LP",), ),
"ucode15.fw" : ( (15,), ("LP",), ),
"ucode16_mimo.fw" : ( (16,17,18,19,23,), ("N",), ),
# "ucode16_lp.fw" : ( (16,17,18,19,), ("LP",), ),
"ucode24_lcn.fw" : ( (24,), ("LCN",), ),
"ucode25_mimo.fw" : ( (25,28,), ("N",), ),
"ucode25_lcn.fw" : ( (25,28,), ("LCN",), ),
"ucode26_mimo.fw" : ( (26,), ("HT",), ),
"ucode29_mimo.fw" : ( (29,), ("HT",), ),
"ucode30_mimo.fw" : ( (30,), ("N",), ),
"ucode33_lcn40.fw" : ( (33,), ("LCN40",), ),
"ucode40.fw" : ( (40,), ("AC",), ),
"ucode42.fw" : ( (42,), ("AC",), ),
"pcm4.fw" : ( (1,2,3,4,), ("G",), ),
"pcm5.fw" : ( (5,6,7,8,9,10,), ("G","A","AG",), ),
}
initvalmapping = {
"a0g1initvals5.fw" : ( (5,6,7,8,9,10,), ("AG",), ),
"a0g0initvals5.fw" : ( (5,6,7,8,9,10,), ("A", "AG",), ),
"b0g0initvals2.fw" : ( (2,4,), ("G",), ),
"b0g0initvals5.fw" : ( (5,6,7,8,9,10,), ("G",), ),
"b0g0initvals13.fw" : ( (13,), ("G",), ),
"n0initvals11.fw" : ( (11,12,), ("N",), ),
"n0initvals16.fw" : ( (16,17,18,23,), ("N",), ),
"n0initvals24.fw" : ( (24,), ("N",), ),
"n0initvals25.fw" : ( (25,28,), ("N",), ),
"n16initvals30.fw" : ( (30,), ("N",), ),
"lp0initvals13.fw" : ( (13,), ("LP",), ),
"lp0initvals14.fw" : ( (14,), ("LP",), ),
"lp0initvals15.fw" : ( (15,), ("LP",), ),
# "lp0initvals16.fw" : ( (16,17,18,), ("LP",), ),
"lcn0initvals24.fw" : ( (24,), ("LCN",), ),
"ht0initvals26.fw" : ( (26,), ("HT",), ),
"ht0initvals29.fw" : ( (29,), ("HT",), ),
"lcn400initvals33.fw" : ( (33,), ("LCN40",), ),
"ac0initvals40.fw" : ( (40,), ("AC",), ),
"ac1initvals42.fw" : ( (42,), ("AC",), ),
"a0g1bsinitvals5.fw" : ( (5,6,7,8,9,10,), ("AG",), ),
"a0g0bsinitvals5.fw" : ( (5,6,7,8,9,10,), ("A", "AG"), ),
"b0g0bsinitvals5.fw" : ( (5,6,7,8,9,10,), ("G",), ),
"n0bsinitvals11.fw" : ( (11,12,), ("N",), ),
"n0bsinitvals16.fw" : ( (16,17,18,23,), ("N",), ),
"n0bsinitvals24.fw" : ( (24,), ("N",), ),
"n0bsinitvals25.fw" : ( (25,28,), ("N",), ),
"n16bsinitvals30.fw" : ( (30,), ("N",), ),
"lp0bsinitvals13.fw" : ( (13,), ("LP",), ),
"lp0bsinitvals14.fw" : ( (14,), ("LP",), ),
"lp0bsinitvals15.fw" : ( (15,), ("LP",), ),
# "lp0bsinitvals16.fw" : ( (16,17,18,), ("LP",), ),
"lcn0bsinitvals24.fw" : ( (24,), ("LCN",), ),
"ht0bsinitvals26.fw" : ( (26,), ("HT",), ),
"ht0bsinitvals29.fw" : ( (29,), ("HT",), ),
"lcn400bsinitvals33.fw" : ( (33,), ("LCN40",), ),
"ac0bsinitvals40.fw" : ( (40,), ("AC",), ),
"ac1bsinitvals42.fw" : ( (42,), ("AC",), ),
}
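# Worked example: with PHYTYPES="G" and COREREVS="5", the tables above match
# ucode5.fw and pcm5.fw (revs 5-10, "G") plus b0g0initvals5.fw and
# b0g0bsinitvals5.fw; every other .fw file is deleted below.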
for f in fwfiles:
if f in revmapping:
if revs_match(corerevs, revmapping[f][0]) and\
phytypes_match(phytypes, revmapping[f][1]):
required_fwfiles += [f]
continue
if f in initvalmapping:
if revs_match(corerevs, initvalmapping[f][0]) and\
phytypes_match(phytypes, initvalmapping[f][1]):
required_fwfiles += [f]
continue
print("WARNING: Firmware file %s not found in the mapping lists" % f)
for f in fwfiles:
if f not in required_fwfiles:
print("Deleting %s" % f)
os.unlink(fwpath + '/' + f)
|
jkimbo/freight
|
refs/heads/master
|
freight/api/serializer/base.py
|
1
|
from __future__ import absolute_import, unicode_literals
__all__ = ['Serializer']
class Serializer(object):
def __call__(self, *args, **kwargs):
return self.serialize(*args, **kwargs)
def get_attrs(self, item_list):
return {}
def serialize(self, item, attrs):
return {}
def format_datetime(self, datetime):
if not datetime:
return
return datetime.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
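# A minimal sketch of the intended subclassing pattern; UserSerializer and
# its fields are hypothetical, not part of freight:
#
#     class UserSerializer(Serializer):
#         def serialize(self, item, attrs):
#             return {
#                 'id': str(item.id),
#                 'dateCreated': self.format_datetime(item.date_created),
#             }
#
# Calling UserSerializer()(user, {}) dispatches through __call__ and returns
# the dict built by serialize().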
|
CodingVault/LeetCodeInPython
|
refs/heads/master
|
clone_graph.py
|
1
|
#!/usr/bin/env python
# encoding: utf-8
"""
clone_graph.py
Created by Shengwei on 2014-07-13.
"""
# https://oj.leetcode.com/problems/clone-graph/
# tags: easy / medium, graph, hashtable, recursion, copy, dfs
"""
Clone an undirected graph. Each node in the graph contains a label and a list of its neighbors.
OJ's undirected graph serialization:
Nodes are labeled uniquely.
We use # as a separator for each node, and , as a separator for node label and each neighbor of the node.
As an example, consider the serialized graph {0,1,2#1,2#2,2}.
The graph has a total of three nodes, and therefore contains three parts as separated by #.
First node is labeled as 0. Connect node 0 to both nodes 1 and 2.
Second node is labeled as 1. Connect node 1 to node 2.
Third node is labeled as 2. Connect node 2 to node 2 (itself), thus forming a self-cycle.
Visually, the graph looks like the following:
1
/ \
/ \
0 --- 2
/ \
\_/
"""
# TODO: rewrite using a stack instead of recursion
# Definition for an undirected graph node
# class UndirectedGraphNode:
# def __init__(self, x):
# self.label = x
# self.neighbors = []
class Solution:
# @param node, an undirected graph node
# @return an undirected graph node
def cloneGraph(self, root):
if root is None:
return None
visited = {}
def clone(node):
if node in visited:
return visited[node]
copy = UndirectedGraphNode(node.label)
visited[node] = copy
for neighbor in node.neighbors:
copy.neighbors.append(clone(neighbor))
return copy
return clone(root)
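# A small self-check of cloneGraph on the example graph {0,1,2#1,2#2,2};
# UndirectedGraphNode is redefined here (matching the commented-out OJ
# definition above) so the sketch runs standalone.
class UndirectedGraphNode:
    def __init__(self, x):
        self.label = x
        self.neighbors = []

if __name__ == '__main__':
    n0, n1, n2 = UndirectedGraphNode(0), UndirectedGraphNode(1), UndirectedGraphNode(2)
    n0.neighbors = [n1, n2]
    n1.neighbors = [n2]
    n2.neighbors = [n2]  # self-cycle
    copy = Solution().cloneGraph(n0)
    assert copy is not n0 and copy.label == 0
    assert [n.label for n in copy.neighbors] == [1, 2]
    assert copy.neighbors[1].neighbors[0] is copy.neighbors[1]  # cycle preserved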
|
marcel-dancak/QGIS
|
refs/heads/master
|
scripts/qgis_fixes/fix_paren.py
|
77
|
from lib2to3.fixes.fix_paren import FixParen
|
cebrusfs/217gdb
|
refs/heads/master
|
pwndbg/constants/arm.py
|
10
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .constant import Constant
__NR_OABI_SYSCALL_BASE = Constant('__NR_OABI_SYSCALL_BASE',0x900000)
__NR_SYSCALL_BASE = Constant('__NR_SYSCALL_BASE',0)
__NR_restart_syscall = Constant('__NR_restart_syscall',(0+ 0))
__NR_exit = Constant('__NR_exit',(0+ 1))
__NR_fork = Constant('__NR_fork',(0+ 2))
__NR_read = Constant('__NR_read',(0+ 3))
__NR_write = Constant('__NR_write',(0+ 4))
__NR_open = Constant('__NR_open',(0+ 5))
__NR_close = Constant('__NR_close',(0+ 6))
__NR_creat = Constant('__NR_creat',(0+ 8))
__NR_link = Constant('__NR_link',(0+ 9))
__NR_unlink = Constant('__NR_unlink',(0+ 10))
__NR_execve = Constant('__NR_execve',(0+ 11))
__NR_chdir = Constant('__NR_chdir',(0+ 12))
__NR_time = Constant('__NR_time',(0+ 13))
__NR_mknod = Constant('__NR_mknod',(0+ 14))
__NR_chmod = Constant('__NR_chmod',(0+ 15))
__NR_lchown = Constant('__NR_lchown',(0+ 16))
__NR_lseek = Constant('__NR_lseek',(0+ 19))
__NR_getpid = Constant('__NR_getpid',(0+ 20))
__NR_mount = Constant('__NR_mount',(0+ 21))
__NR_umount = Constant('__NR_umount',(0+ 22))
__NR_setuid = Constant('__NR_setuid',(0+ 23))
__NR_getuid = Constant('__NR_getuid',(0+ 24))
__NR_stime = Constant('__NR_stime',(0+ 25))
__NR_ptrace = Constant('__NR_ptrace',(0+ 26))
__NR_alarm = Constant('__NR_alarm',(0+ 27))
__NR_pause = Constant('__NR_pause',(0+ 29))
__NR_utime = Constant('__NR_utime',(0+ 30))
__NR_access = Constant('__NR_access',(0+ 33))
__NR_nice = Constant('__NR_nice',(0+ 34))
__NR_sync = Constant('__NR_sync',(0+ 36))
__NR_kill = Constant('__NR_kill',(0+ 37))
__NR_rename = Constant('__NR_rename',(0+ 38))
__NR_mkdir = Constant('__NR_mkdir',(0+ 39))
__NR_rmdir = Constant('__NR_rmdir',(0+ 40))
__NR_dup = Constant('__NR_dup',(0+ 41))
__NR_pipe = Constant('__NR_pipe',(0+ 42))
__NR_times = Constant('__NR_times',(0+ 43))
__NR_brk = Constant('__NR_brk',(0+ 45))
__NR_setgid = Constant('__NR_setgid',(0+ 46))
__NR_getgid = Constant('__NR_getgid',(0+ 47))
__NR_geteuid = Constant('__NR_geteuid',(0+ 49))
__NR_getegid = Constant('__NR_getegid',(0+ 50))
__NR_acct = Constant('__NR_acct',(0+ 51))
__NR_umount2 = Constant('__NR_umount2',(0+ 52))
__NR_ioctl = Constant('__NR_ioctl',(0+ 54))
__NR_fcntl = Constant('__NR_fcntl',(0+ 55))
__NR_setpgid = Constant('__NR_setpgid',(0+ 57))
__NR_umask = Constant('__NR_umask',(0+ 60))
__NR_chroot = Constant('__NR_chroot',(0+ 61))
__NR_ustat = Constant('__NR_ustat',(0+ 62))
__NR_dup2 = Constant('__NR_dup2',(0+ 63))
__NR_getppid = Constant('__NR_getppid',(0+ 64))
__NR_getpgrp = Constant('__NR_getpgrp',(0+ 65))
__NR_setsid = Constant('__NR_setsid',(0+ 66))
__NR_sigaction = Constant('__NR_sigaction',(0+ 67))
__NR_setreuid = Constant('__NR_setreuid',(0+ 70))
__NR_setregid = Constant('__NR_setregid',(0+ 71))
__NR_sigsuspend = Constant('__NR_sigsuspend',(0+ 72))
__NR_sigpending = Constant('__NR_sigpending',(0+ 73))
__NR_sethostname = Constant('__NR_sethostname',(0+ 74))
__NR_setrlimit = Constant('__NR_setrlimit',(0+ 75))
__NR_getrlimit = Constant('__NR_getrlimit',(0+ 76))
__NR_getrusage = Constant('__NR_getrusage',(0+ 77))
__NR_gettimeofday = Constant('__NR_gettimeofday',(0+ 78))
__NR_settimeofday = Constant('__NR_settimeofday',(0+ 79))
__NR_getgroups = Constant('__NR_getgroups',(0+ 80))
__NR_setgroups = Constant('__NR_setgroups',(0+ 81))
__NR_select = Constant('__NR_select',(0+ 82))
__NR_symlink = Constant('__NR_symlink',(0+ 83))
__NR_readlink = Constant('__NR_readlink',(0+ 85))
__NR_uselib = Constant('__NR_uselib',(0+ 86))
__NR_swapon = Constant('__NR_swapon',(0+ 87))
__NR_reboot = Constant('__NR_reboot',(0+ 88))
__NR_readdir = Constant('__NR_readdir',(0+ 89))
__NR_mmap = Constant('__NR_mmap',(0+ 90))
__NR_munmap = Constant('__NR_munmap',(0+ 91))
__NR_truncate = Constant('__NR_truncate',(0+ 92))
__NR_ftruncate = Constant('__NR_ftruncate',(0+ 93))
__NR_fchmod = Constant('__NR_fchmod',(0+ 94))
__NR_fchown = Constant('__NR_fchown',(0+ 95))
__NR_getpriority = Constant('__NR_getpriority',(0+ 96))
__NR_setpriority = Constant('__NR_setpriority',(0+ 97))
__NR_statfs = Constant('__NR_statfs',(0+ 99))
__NR_fstatfs = Constant('__NR_fstatfs',(0+100))
__NR_socketcall = Constant('__NR_socketcall',(0+102))
__NR_syslog = Constant('__NR_syslog',(0+103))
__NR_setitimer = Constant('__NR_setitimer',(0+104))
__NR_getitimer = Constant('__NR_getitimer',(0+105))
__NR_stat = Constant('__NR_stat',(0+106))
__NR_lstat = Constant('__NR_lstat',(0+107))
__NR_fstat = Constant('__NR_fstat',(0+108))
__NR_vhangup = Constant('__NR_vhangup',(0+111))
__NR_syscall = Constant('__NR_syscall',(0+113))
__NR_wait4 = Constant('__NR_wait4',(0+114))
__NR_swapoff = Constant('__NR_swapoff',(0+115))
__NR_sysinfo = Constant('__NR_sysinfo',(0+116))
__NR_ipc = Constant('__NR_ipc',(0+117))
__NR_fsync = Constant('__NR_fsync',(0+118))
__NR_sigreturn = Constant('__NR_sigreturn',(0+119))
__NR_clone = Constant('__NR_clone',(0+120))
__NR_setdomainname = Constant('__NR_setdomainname',(0+121))
__NR_uname = Constant('__NR_uname',(0+122))
__NR_adjtimex = Constant('__NR_adjtimex',(0+124))
__NR_mprotect = Constant('__NR_mprotect',(0+125))
__NR_sigprocmask = Constant('__NR_sigprocmask',(0+126))
__NR_init_module = Constant('__NR_init_module',(0+128))
__NR_delete_module = Constant('__NR_delete_module',(0+129))
__NR_quotactl = Constant('__NR_quotactl',(0+131))
__NR_getpgid = Constant('__NR_getpgid',(0+132))
__NR_fchdir = Constant('__NR_fchdir',(0+133))
__NR_bdflush = Constant('__NR_bdflush',(0+134))
__NR_sysfs = Constant('__NR_sysfs',(0+135))
__NR_personality = Constant('__NR_personality',(0+136))
__NR_setfsuid = Constant('__NR_setfsuid',(0+138))
__NR_setfsgid = Constant('__NR_setfsgid',(0+139))
__NR__llseek = Constant('__NR__llseek',(0+140))
__NR_getdents = Constant('__NR_getdents',(0+141))
__NR__newselect = Constant('__NR__newselect',(0+142))
__NR_flock = Constant('__NR_flock',(0+143))
__NR_msync = Constant('__NR_msync',(0+144))
__NR_readv = Constant('__NR_readv',(0+145))
__NR_writev = Constant('__NR_writev',(0+146))
__NR_getsid = Constant('__NR_getsid',(0+147))
__NR_fdatasync = Constant('__NR_fdatasync',(0+148))
__NR__sysctl = Constant('__NR__sysctl',(0+149))
__NR_mlock = Constant('__NR_mlock',(0+150))
__NR_munlock = Constant('__NR_munlock',(0+151))
__NR_mlockall = Constant('__NR_mlockall',(0+152))
__NR_munlockall = Constant('__NR_munlockall',(0+153))
__NR_sched_setparam = Constant('__NR_sched_setparam',(0+154))
__NR_sched_getparam = Constant('__NR_sched_getparam',(0+155))
__NR_sched_setscheduler = Constant('__NR_sched_setscheduler',(0+156))
__NR_sched_getscheduler = Constant('__NR_sched_getscheduler',(0+157))
__NR_sched_yield = Constant('__NR_sched_yield',(0+158))
__NR_sched_get_priority_max = Constant('__NR_sched_get_priority_max',(0+159))
__NR_sched_get_priority_min = Constant('__NR_sched_get_priority_min',(0+160))
__NR_sched_rr_get_interval = Constant('__NR_sched_rr_get_interval',(0+161))
__NR_nanosleep = Constant('__NR_nanosleep',(0+162))
__NR_mremap = Constant('__NR_mremap',(0+163))
__NR_setresuid = Constant('__NR_setresuid',(0+164))
__NR_getresuid = Constant('__NR_getresuid',(0+165))
__NR_poll = Constant('__NR_poll',(0+168))
__NR_nfsservctl = Constant('__NR_nfsservctl',(0+169))
__NR_setresgid = Constant('__NR_setresgid',(0+170))
__NR_getresgid = Constant('__NR_getresgid',(0+171))
__NR_prctl = Constant('__NR_prctl',(0+172))
__NR_rt_sigreturn = Constant('__NR_rt_sigreturn',(0+173))
__NR_rt_sigaction = Constant('__NR_rt_sigaction',(0+174))
__NR_rt_sigprocmask = Constant('__NR_rt_sigprocmask',(0+175))
__NR_rt_sigpending = Constant('__NR_rt_sigpending',(0+176))
__NR_rt_sigtimedwait = Constant('__NR_rt_sigtimedwait',(0+177))
__NR_rt_sigqueueinfo = Constant('__NR_rt_sigqueueinfo',(0+178))
__NR_rt_sigsuspend = Constant('__NR_rt_sigsuspend',(0+179))
__NR_pread64 = Constant('__NR_pread64',(0+180))
__NR_pwrite64 = Constant('__NR_pwrite64',(0+181))
__NR_chown = Constant('__NR_chown',(0+182))
__NR_getcwd = Constant('__NR_getcwd',(0+183))
__NR_capget = Constant('__NR_capget',(0+184))
__NR_capset = Constant('__NR_capset',(0+185))
__NR_sigaltstack = Constant('__NR_sigaltstack',(0+186))
__NR_sendfile = Constant('__NR_sendfile',(0+187))
__NR_vfork = Constant('__NR_vfork',(0+190))
__NR_ugetrlimit = Constant('__NR_ugetrlimit',(0+191))
__NR_mmap2 = Constant('__NR_mmap2',(0+192))
__NR_truncate64 = Constant('__NR_truncate64',(0+193))
__NR_ftruncate64 = Constant('__NR_ftruncate64',(0+194))
__NR_stat64 = Constant('__NR_stat64',(0+195))
__NR_lstat64 = Constant('__NR_lstat64',(0+196))
__NR_fstat64 = Constant('__NR_fstat64',(0+197))
__NR_lchown32 = Constant('__NR_lchown32',(0+198))
__NR_getuid32 = Constant('__NR_getuid32',(0+199))
__NR_getgid32 = Constant('__NR_getgid32',(0+200))
__NR_geteuid32 = Constant('__NR_geteuid32',(0+201))
__NR_getegid32 = Constant('__NR_getegid32',(0+202))
__NR_setreuid32 = Constant('__NR_setreuid32',(0+203))
__NR_setregid32 = Constant('__NR_setregid32',(0+204))
__NR_getgroups32 = Constant('__NR_getgroups32',(0+205))
__NR_setgroups32 = Constant('__NR_setgroups32',(0+206))
__NR_fchown32 = Constant('__NR_fchown32',(0+207))
__NR_setresuid32 = Constant('__NR_setresuid32',(0+208))
__NR_getresuid32 = Constant('__NR_getresuid32',(0+209))
__NR_setresgid32 = Constant('__NR_setresgid32',(0+210))
__NR_getresgid32 = Constant('__NR_getresgid32',(0+211))
__NR_chown32 = Constant('__NR_chown32',(0+212))
__NR_setuid32 = Constant('__NR_setuid32',(0+213))
__NR_setgid32 = Constant('__NR_setgid32',(0+214))
__NR_setfsuid32 = Constant('__NR_setfsuid32',(0+215))
__NR_setfsgid32 = Constant('__NR_setfsgid32',(0+216))
__NR_getdents64 = Constant('__NR_getdents64',(0+217))
__NR_pivot_root = Constant('__NR_pivot_root',(0+218))
__NR_mincore = Constant('__NR_mincore',(0+219))
__NR_madvise = Constant('__NR_madvise',(0+220))
__NR_fcntl64 = Constant('__NR_fcntl64',(0+221))
__NR_gettid = Constant('__NR_gettid',(0+224))
__NR_readahead = Constant('__NR_readahead',(0+225))
__NR_setxattr = Constant('__NR_setxattr',(0+226))
__NR_lsetxattr = Constant('__NR_lsetxattr',(0+227))
__NR_fsetxattr = Constant('__NR_fsetxattr',(0+228))
__NR_getxattr = Constant('__NR_getxattr',(0+229))
__NR_lgetxattr = Constant('__NR_lgetxattr',(0+230))
__NR_fgetxattr = Constant('__NR_fgetxattr',(0+231))
__NR_listxattr = Constant('__NR_listxattr',(0+232))
__NR_llistxattr = Constant('__NR_llistxattr',(0+233))
__NR_flistxattr = Constant('__NR_flistxattr',(0+234))
__NR_removexattr = Constant('__NR_removexattr',(0+235))
__NR_lremovexattr = Constant('__NR_lremovexattr',(0+236))
__NR_fremovexattr = Constant('__NR_fremovexattr',(0+237))
__NR_tkill = Constant('__NR_tkill',(0+238))
__NR_sendfile64 = Constant('__NR_sendfile64',(0+239))
__NR_futex = Constant('__NR_futex',(0+240))
__NR_sched_setaffinity = Constant('__NR_sched_setaffinity',(0+241))
__NR_sched_getaffinity = Constant('__NR_sched_getaffinity',(0+242))
__NR_io_setup = Constant('__NR_io_setup',(0+243))
__NR_io_destroy = Constant('__NR_io_destroy',(0+244))
__NR_io_getevents = Constant('__NR_io_getevents',(0+245))
__NR_io_submit = Constant('__NR_io_submit',(0+246))
__NR_io_cancel = Constant('__NR_io_cancel',(0+247))
__NR_exit_group = Constant('__NR_exit_group',(0+248))
__NR_lookup_dcookie = Constant('__NR_lookup_dcookie',(0+249))
__NR_epoll_create = Constant('__NR_epoll_create',(0+250))
__NR_epoll_ctl = Constant('__NR_epoll_ctl',(0+251))
__NR_epoll_wait = Constant('__NR_epoll_wait',(0+252))
__NR_remap_file_pages = Constant('__NR_remap_file_pages',(0+253))
__NR_set_tid_address = Constant('__NR_set_tid_address',(0+256))
__NR_timer_create = Constant('__NR_timer_create',(0+257))
__NR_timer_settime = Constant('__NR_timer_settime',(0+258))
__NR_timer_gettime = Constant('__NR_timer_gettime',(0+259))
__NR_timer_getoverrun = Constant('__NR_timer_getoverrun',(0+260))
__NR_timer_delete = Constant('__NR_timer_delete',(0+261))
__NR_clock_settime = Constant('__NR_clock_settime',(0+262))
__NR_clock_gettime = Constant('__NR_clock_gettime',(0+263))
__NR_clock_getres = Constant('__NR_clock_getres',(0+264))
__NR_clock_nanosleep = Constant('__NR_clock_nanosleep',(0+265))
__NR_statfs64 = Constant('__NR_statfs64',(0+266))
__NR_fstatfs64 = Constant('__NR_fstatfs64',(0+267))
__NR_tgkill = Constant('__NR_tgkill',(0+268))
__NR_utimes = Constant('__NR_utimes',(0+269))
__NR_arm_fadvise64_64 = Constant('__NR_arm_fadvise64_64',(0+270))
__NR_pciconfig_iobase = Constant('__NR_pciconfig_iobase',(0+271))
__NR_pciconfig_read = Constant('__NR_pciconfig_read',(0+272))
__NR_pciconfig_write = Constant('__NR_pciconfig_write',(0+273))
__NR_mq_open = Constant('__NR_mq_open',(0+274))
__NR_mq_unlink = Constant('__NR_mq_unlink',(0+275))
__NR_mq_timedsend = Constant('__NR_mq_timedsend',(0+276))
__NR_mq_timedreceive = Constant('__NR_mq_timedreceive',(0+277))
__NR_mq_notify = Constant('__NR_mq_notify',(0+278))
__NR_mq_getsetattr = Constant('__NR_mq_getsetattr',(0+279))
__NR_waitid = Constant('__NR_waitid',(0+280))
__NR_socket = Constant('__NR_socket',(0+281))
__NR_bind = Constant('__NR_bind',(0+282))
__NR_connect = Constant('__NR_connect',(0+283))
__NR_listen = Constant('__NR_listen',(0+284))
__NR_accept = Constant('__NR_accept',(0+285))
__NR_getsockname = Constant('__NR_getsockname',(0+286))
__NR_getpeername = Constant('__NR_getpeername',(0+287))
__NR_socketpair = Constant('__NR_socketpair',(0+288))
__NR_send = Constant('__NR_send',(0+289))
__NR_sendto = Constant('__NR_sendto',(0+290))
__NR_recv = Constant('__NR_recv',(0+291))
__NR_recvfrom = Constant('__NR_recvfrom',(0+292))
__NR_shutdown = Constant('__NR_shutdown',(0+293))
__NR_setsockopt = Constant('__NR_setsockopt',(0+294))
__NR_getsockopt = Constant('__NR_getsockopt',(0+295))
__NR_sendmsg = Constant('__NR_sendmsg',(0+296))
__NR_recvmsg = Constant('__NR_recvmsg',(0+297))
__NR_semop = Constant('__NR_semop',(0+298))
__NR_semget = Constant('__NR_semget',(0+299))
__NR_semctl = Constant('__NR_semctl',(0+300))
__NR_msgsnd = Constant('__NR_msgsnd',(0+301))
__NR_msgrcv = Constant('__NR_msgrcv',(0+302))
__NR_msgget = Constant('__NR_msgget',(0+303))
__NR_msgctl = Constant('__NR_msgctl',(0+304))
__NR_shmat = Constant('__NR_shmat',(0+305))
__NR_shmdt = Constant('__NR_shmdt',(0+306))
__NR_shmget = Constant('__NR_shmget',(0+307))
__NR_shmctl = Constant('__NR_shmctl',(0+308))
__NR_add_key = Constant('__NR_add_key',(0+309))
__NR_request_key = Constant('__NR_request_key',(0+310))
__NR_keyctl = Constant('__NR_keyctl',(0+311))
__NR_semtimedop = Constant('__NR_semtimedop',(0+312))
__NR_vserver = Constant('__NR_vserver',(0+313))
__NR_ioprio_set = Constant('__NR_ioprio_set',(0+314))
__NR_ioprio_get = Constant('__NR_ioprio_get',(0+315))
__NR_inotify_init = Constant('__NR_inotify_init',(0+316))
__NR_inotify_add_watch = Constant('__NR_inotify_add_watch',(0+317))
__NR_inotify_rm_watch = Constant('__NR_inotify_rm_watch',(0+318))
__NR_mbind = Constant('__NR_mbind',(0+319))
__NR_get_mempolicy = Constant('__NR_get_mempolicy',(0+320))
__NR_set_mempolicy = Constant('__NR_set_mempolicy',(0+321))
__NR_openat = Constant('__NR_openat',(0+322))
__NR_mkdirat = Constant('__NR_mkdirat',(0+323))
__NR_mknodat = Constant('__NR_mknodat',(0+324))
__NR_fchownat = Constant('__NR_fchownat',(0+325))
__NR_futimesat = Constant('__NR_futimesat',(0+326))
__NR_fstatat64 = Constant('__NR_fstatat64',(0+327))
__NR_unlinkat = Constant('__NR_unlinkat',(0+328))
__NR_renameat = Constant('__NR_renameat',(0+329))
__NR_linkat = Constant('__NR_linkat',(0+330))
__NR_symlinkat = Constant('__NR_symlinkat',(0+331))
__NR_readlinkat = Constant('__NR_readlinkat',(0+332))
__NR_fchmodat = Constant('__NR_fchmodat',(0+333))
__NR_faccessat = Constant('__NR_faccessat',(0+334))
__NR_unshare = Constant('__NR_unshare',(0+337))
__NR_set_robust_list = Constant('__NR_set_robust_list',(0+338))
__NR_get_robust_list = Constant('__NR_get_robust_list',(0+339))
__NR_splice = Constant('__NR_splice',(0+340))
__NR_arm_sync_file_range = Constant('__NR_arm_sync_file_range',(0+341))
__NR_tee = Constant('__NR_tee',(0+342))
__NR_vmsplice = Constant('__NR_vmsplice',(0+343))
__NR_move_pages = Constant('__NR_move_pages',(0+344))
__NR_getcpu = Constant('__NR_getcpu',(0+345))
__NR_kexec_load = Constant('__NR_kexec_load',(0+347))
__NR_utimensat = Constant('__NR_utimensat',(0+348))
__NR_signalfd = Constant('__NR_signalfd',(0+349))
__NR_timerfd = Constant('__NR_timerfd',(0+350))
__NR_eventfd = Constant('__NR_eventfd',(0+351))
__NR_fallocate = Constant('__NR_fallocate',(0+352))
__NR_timerfd_settime = Constant('__NR_timerfd_settime',(0+353))
__NR_timerfd_gettime = Constant('__NR_timerfd_gettime',(0+354))
__SYS_NERR = Constant('__SYS_NERR',((129) + 1))
_SYS_TIME_H = Constant('_SYS_TIME_H',1)
SYS_accept = Constant('SYS_accept',(0+285))
SYS_access = Constant('SYS_access',(0+ 33))
SYS_acct = Constant('SYS_acct',(0+ 51))
SYS_add_key = Constant('SYS_add_key',(0+309))
SYS_adjtimex = Constant('SYS_adjtimex',(0+124))
SYS_alarm = Constant('SYS_alarm',(0+ 27))
SYS_arm_fadvise64_64 = Constant('SYS_arm_fadvise64_64',(0+270))
SYS_arm_sync_file_range = Constant('SYS_arm_sync_file_range',(0+341))
SYS_bdflush = Constant('SYS_bdflush',(0+134))
SYS_bind = Constant('SYS_bind',(0+282))
SYS_brk = Constant('SYS_brk',(0+ 45))
SYS_capget = Constant('SYS_capget',(0+184))
SYS_capset = Constant('SYS_capset',(0+185))
SYS_chdir = Constant('SYS_chdir',(0+ 12))
SYS_chmod = Constant('SYS_chmod',(0+ 15))
SYS_chown = Constant('SYS_chown',(0+182))
SYS_chown32 = Constant('SYS_chown32',(0+212))
SYS_chroot = Constant('SYS_chroot',(0+ 61))
SYS_clock_getres = Constant('SYS_clock_getres',(0+264))
SYS_clock_gettime = Constant('SYS_clock_gettime',(0+263))
SYS_clock_nanosleep = Constant('SYS_clock_nanosleep',(0+265))
SYS_clock_settime = Constant('SYS_clock_settime',(0+262))
SYS_clone = Constant('SYS_clone',(0+120))
SYS_close = Constant('SYS_close',(0+ 6))
SYS_connect = Constant('SYS_connect',(0+283))
SYS_creat = Constant('SYS_creat',(0+ 8))
SYS_delete_module = Constant('SYS_delete_module',(0+129))
SYS_dup = Constant('SYS_dup',(0+ 41))
SYS_dup2 = Constant('SYS_dup2',(0+ 63))
SYS_epoll_create = Constant('SYS_epoll_create',(0+250))
SYS_epoll_ctl = Constant('SYS_epoll_ctl',(0+251))
SYS_epoll_wait = Constant('SYS_epoll_wait',(0+252))
SYS_eventfd = Constant('SYS_eventfd',(0+351))
SYS_execve = Constant('SYS_execve',(0+ 11))
SYS_exit = Constant('SYS_exit',(0+ 1))
SYS_exit_group = Constant('SYS_exit_group',(0+248))
SYS_faccessat = Constant('SYS_faccessat',(0+334))
SYS_fallocate = Constant('SYS_fallocate',(0+352))
SYS_fchdir = Constant('SYS_fchdir',(0+133))
SYS_fchmod = Constant('SYS_fchmod',(0+ 94))
SYS_fchmodat = Constant('SYS_fchmodat',(0+333))
SYS_fchown = Constant('SYS_fchown',(0+ 95))
SYS_fchown32 = Constant('SYS_fchown32',(0+207))
SYS_fchownat = Constant('SYS_fchownat',(0+325))
SYS_fcntl = Constant('SYS_fcntl',(0+ 55))
SYS_fcntl64 = Constant('SYS_fcntl64',(0+221))
SYS_fdatasync = Constant('SYS_fdatasync',(0+148))
SYS_fgetxattr = Constant('SYS_fgetxattr',(0+231))
SYS_flistxattr = Constant('SYS_flistxattr',(0+234))
SYS_flock = Constant('SYS_flock',(0+143))
SYS_fork = Constant('SYS_fork',(0+ 2))
SYS_fremovexattr = Constant('SYS_fremovexattr',(0+237))
SYS_fsetxattr = Constant('SYS_fsetxattr',(0+228))
SYS_fstat = Constant('SYS_fstat',(0+108))
SYS_fstat64 = Constant('SYS_fstat64',(0+197))
SYS_fstatat64 = Constant('SYS_fstatat64',(0+327))
SYS_fstatfs = Constant('SYS_fstatfs',(0+100))
SYS_fstatfs64 = Constant('SYS_fstatfs64',(0+267))
SYS_fsync = Constant('SYS_fsync',(0+118))
SYS_ftruncate = Constant('SYS_ftruncate',(0+ 93))
SYS_ftruncate64 = Constant('SYS_ftruncate64',(0+194))
SYS_futex = Constant('SYS_futex',(0+240))
SYS_futimesat = Constant('SYS_futimesat',(0+326))
SYS_getcpu = Constant('SYS_getcpu',(0+345))
SYS_getcwd = Constant('SYS_getcwd',(0+183))
SYS_getdents = Constant('SYS_getdents',(0+141))
SYS_getdents64 = Constant('SYS_getdents64',(0+217))
SYS_getegid = Constant('SYS_getegid',(0+ 50))
SYS_getegid32 = Constant('SYS_getegid32',(0+202))
SYS_geteuid = Constant('SYS_geteuid',(0+ 49))
SYS_geteuid32 = Constant('SYS_geteuid32',(0+201))
SYS_getgid = Constant('SYS_getgid',(0+ 47))
SYS_getgid32 = Constant('SYS_getgid32',(0+200))
SYS_getgroups = Constant('SYS_getgroups',(0+ 80))
SYS_getgroups32 = Constant('SYS_getgroups32',(0+205))
SYS_getitimer = Constant('SYS_getitimer',(0+105))
SYS_get_mempolicy = Constant('SYS_get_mempolicy',(0+320))
SYS_getpeername = Constant('SYS_getpeername',(0+287))
SYS_getpgid = Constant('SYS_getpgid',(0+132))
SYS_getpgrp = Constant('SYS_getpgrp',(0+ 65))
SYS_getpid = Constant('SYS_getpid',(0+ 20))
SYS_getppid = Constant('SYS_getppid',(0+ 64))
SYS_getpriority = Constant('SYS_getpriority',(0+ 96))
SYS_getresgid = Constant('SYS_getresgid',(0+171))
SYS_getresgid32 = Constant('SYS_getresgid32',(0+211))
SYS_getresuid = Constant('SYS_getresuid',(0+165))
SYS_getresuid32 = Constant('SYS_getresuid32',(0+209))
SYS_getrlimit = Constant('SYS_getrlimit',(0+ 76))
SYS_get_robust_list = Constant('SYS_get_robust_list',(0+339))
SYS_getrusage = Constant('SYS_getrusage',(0+ 77))
SYS_getsid = Constant('SYS_getsid',(0+147))
SYS_getsockname = Constant('SYS_getsockname',(0+286))
SYS_getsockopt = Constant('SYS_getsockopt',(0+295))
SYS_gettid = Constant('SYS_gettid',(0+224))
SYS_gettimeofday = Constant('SYS_gettimeofday',(0+ 78))
SYS_getuid = Constant('SYS_getuid',(0+ 24))
SYS_getuid32 = Constant('SYS_getuid32',(0+199))
SYS_getxattr = Constant('SYS_getxattr',(0+229))
SYS_init_module = Constant('SYS_init_module',(0+128))
SYS_inotify_add_watch = Constant('SYS_inotify_add_watch',(0+317))
SYS_inotify_init = Constant('SYS_inotify_init',(0+316))
SYS_inotify_rm_watch = Constant('SYS_inotify_rm_watch',(0+318))
SYS_io_cancel = Constant('SYS_io_cancel',(0+247))
SYS_ioctl = Constant('SYS_ioctl',(0+ 54))
SYS_io_destroy = Constant('SYS_io_destroy',(0+244))
SYS_io_getevents = Constant('SYS_io_getevents',(0+245))
SYS_ioprio_get = Constant('SYS_ioprio_get',(0+315))
SYS_ioprio_set = Constant('SYS_ioprio_set',(0+314))
SYS_io_setup = Constant('SYS_io_setup',(0+243))
SYS_io_submit = Constant('SYS_io_submit',(0+246))
SYS_ipc = Constant('SYS_ipc',(0+117))
SYS_kexec_load = Constant('SYS_kexec_load',(0+347))
SYS_keyctl = Constant('SYS_keyctl',(0+311))
SYS_kill = Constant('SYS_kill',(0+ 37))
SYS_lchown = Constant('SYS_lchown',(0+ 16))
SYS_lchown32 = Constant('SYS_lchown32',(0+198))
SYS_lgetxattr = Constant('SYS_lgetxattr',(0+230))
SYS_link = Constant('SYS_link',(0+ 9))
SYS_linkat = Constant('SYS_linkat',(0+330))
SYS_listen = Constant('SYS_listen',(0+284))
SYS_listxattr = Constant('SYS_listxattr',(0+232))
SYS_llistxattr = Constant('SYS_llistxattr',(0+233))
SYS__llseek = Constant('SYS__llseek',(0+140))
SYS_lookup_dcookie = Constant('SYS_lookup_dcookie',(0+249))
SYS_lremovexattr = Constant('SYS_lremovexattr',(0+236))
SYS_lseek = Constant('SYS_lseek',(0+ 19))
SYS_lsetxattr = Constant('SYS_lsetxattr',(0+227))
SYS_lstat = Constant('SYS_lstat',(0+107))
SYS_lstat64 = Constant('SYS_lstat64',(0+196))
SYS_madvise = Constant('SYS_madvise',(0+220))
SYS_mbind = Constant('SYS_mbind',(0+319))
SYS_mincore = Constant('SYS_mincore',(0+219))
SYS_mkdir = Constant('SYS_mkdir',(0+ 39))
SYS_mkdirat = Constant('SYS_mkdirat',(0+323))
SYS_mknod = Constant('SYS_mknod',(0+ 14))
SYS_mknodat = Constant('SYS_mknodat',(0+324))
SYS_mlock = Constant('SYS_mlock',(0+150))
SYS_mlockall = Constant('SYS_mlockall',(0+152))
SYS_mmap = Constant('SYS_mmap',(0+ 90))
SYS_mmap2 = Constant('SYS_mmap2',(0+192))
SYS_mount = Constant('SYS_mount',(0+ 21))
SYS_move_pages = Constant('SYS_move_pages',(0+344))
SYS_mprotect = Constant('SYS_mprotect',(0+125))
SYS_mq_getsetattr = Constant('SYS_mq_getsetattr',(0+279))
SYS_mq_notify = Constant('SYS_mq_notify',(0+278))
SYS_mq_open = Constant('SYS_mq_open',(0+274))
SYS_mq_timedreceive = Constant('SYS_mq_timedreceive',(0+277))
SYS_mq_timedsend = Constant('SYS_mq_timedsend',(0+276))
SYS_mq_unlink = Constant('SYS_mq_unlink',(0+275))
SYS_mremap = Constant('SYS_mremap',(0+163))
SYS_msgctl = Constant('SYS_msgctl',(0+304))
SYS_msgget = Constant('SYS_msgget',(0+303))
SYS_msgrcv = Constant('SYS_msgrcv',(0+302))
SYS_msgsnd = Constant('SYS_msgsnd',(0+301))
SYS_msync = Constant('SYS_msync',(0+144))
SYS_munlock = Constant('SYS_munlock',(0+151))
SYS_munlockall = Constant('SYS_munlockall',(0+153))
SYS_munmap = Constant('SYS_munmap',(0+ 91))
SYS_nanosleep = Constant('SYS_nanosleep',(0+162))
SYS__newselect = Constant('SYS__newselect',(0+142))
SYS_nfsservctl = Constant('SYS_nfsservctl',(0+169))
SYS_nice = Constant('SYS_nice',(0+ 34))
SYS_OABI_SYSCALL_BASE = Constant('SYS_OABI_SYSCALL_BASE',0x900000)
SYS_open = Constant('SYS_open',(0+ 5))
SYS_openat = Constant('SYS_openat',(0+322))
SYS_pause = Constant('SYS_pause',(0+ 29))
SYS_pciconfig_iobase = Constant('SYS_pciconfig_iobase',(0+271))
SYS_pciconfig_read = Constant('SYS_pciconfig_read',(0+272))
SYS_pciconfig_write = Constant('SYS_pciconfig_write',(0+273))
SYS_personality = Constant('SYS_personality',(0+136))
SYS_pipe = Constant('SYS_pipe',(0+ 42))
SYS_pivot_root = Constant('SYS_pivot_root',(0+218))
SYS_poll = Constant('SYS_poll',(0+168))
SYS_prctl = Constant('SYS_prctl',(0+172))
SYS_pread64 = Constant('SYS_pread64',(0+180))
SYS_ptrace = Constant('SYS_ptrace',(0+ 26))
SYS_pwrite64 = Constant('SYS_pwrite64',(0+181))
SYS_quotactl = Constant('SYS_quotactl',(0+131))
SYS_read = Constant('SYS_read',(0+ 3))
SYS_readahead = Constant('SYS_readahead',(0+225))
SYS_readdir = Constant('SYS_readdir',(0+ 89))
SYS_readlink = Constant('SYS_readlink',(0+ 85))
SYS_readlinkat = Constant('SYS_readlinkat',(0+332))
SYS_readv = Constant('SYS_readv',(0+145))
SYS_reboot = Constant('SYS_reboot',(0+ 88))
SYS_recv = Constant('SYS_recv',(0+291))
SYS_recvfrom = Constant('SYS_recvfrom',(0+292))
SYS_recvmsg = Constant('SYS_recvmsg',(0+297))
SYS_remap_file_pages = Constant('SYS_remap_file_pages',(0+253))
SYS_removexattr = Constant('SYS_removexattr',(0+235))
SYS_rename = Constant('SYS_rename',(0+ 38))
SYS_renameat = Constant('SYS_renameat',(0+329))
SYS_request_key = Constant('SYS_request_key',(0+310))
SYS_restart_syscall = Constant('SYS_restart_syscall',(0+ 0))
SYS_rmdir = Constant('SYS_rmdir',(0+ 40))
SYS_rt_sigaction = Constant('SYS_rt_sigaction',(0+174))
SYS_rt_sigpending = Constant('SYS_rt_sigpending',(0+176))
SYS_rt_sigprocmask = Constant('SYS_rt_sigprocmask',(0+175))
SYS_rt_sigqueueinfo = Constant('SYS_rt_sigqueueinfo',(0+178))
SYS_rt_sigreturn = Constant('SYS_rt_sigreturn',(0+173))
SYS_rt_sigsuspend = Constant('SYS_rt_sigsuspend',(0+179))
SYS_rt_sigtimedwait = Constant('SYS_rt_sigtimedwait',(0+177))
SYS_sched_getaffinity = Constant('SYS_sched_getaffinity',(0+242))
SYS_sched_getparam = Constant('SYS_sched_getparam',(0+155))
SYS_sched_get_priority_max = Constant('SYS_sched_get_priority_max',(0+159))
SYS_sched_get_priority_min = Constant('SYS_sched_get_priority_min',(0+160))
SYS_sched_getscheduler = Constant('SYS_sched_getscheduler',(0+157))
SYS_sched_rr_get_interval = Constant('SYS_sched_rr_get_interval',(0+161))
SYS_sched_setaffinity = Constant('SYS_sched_setaffinity',(0+241))
SYS_sched_setparam = Constant('SYS_sched_setparam',(0+154))
SYS_sched_setscheduler = Constant('SYS_sched_setscheduler',(0+156))
SYS_sched_yield = Constant('SYS_sched_yield',(0+158))
SYS_select = Constant('SYS_select',(0+ 82))
SYS_semctl = Constant('SYS_semctl',(0+300))
SYS_semget = Constant('SYS_semget',(0+299))
SYS_semop = Constant('SYS_semop',(0+298))
SYS_semtimedop = Constant('SYS_semtimedop',(0+312))
SYS_send = Constant('SYS_send',(0+289))
SYS_sendfile = Constant('SYS_sendfile',(0+187))
SYS_sendfile64 = Constant('SYS_sendfile64',(0+239))
SYS_sendmsg = Constant('SYS_sendmsg',(0+296))
SYS_sendto = Constant('SYS_sendto',(0+290))
SYS_setdomainname = Constant('SYS_setdomainname',(0+121))
SYS_setfsgid = Constant('SYS_setfsgid',(0+139))
SYS_setfsgid32 = Constant('SYS_setfsgid32',(0+216))
SYS_setfsuid = Constant('SYS_setfsuid',(0+138))
SYS_setfsuid32 = Constant('SYS_setfsuid32',(0+215))
SYS_setgid = Constant('SYS_setgid',(0+ 46))
SYS_setgid32 = Constant('SYS_setgid32',(0+214))
SYS_setgroups = Constant('SYS_setgroups',(0+ 81))
SYS_setgroups32 = Constant('SYS_setgroups32',(0+206))
SYS_sethostname = Constant('SYS_sethostname',(0+ 74))
SYS_setitimer = Constant('SYS_setitimer',(0+104))
SYS_set_mempolicy = Constant('SYS_set_mempolicy',(0+321))
SYS_setpgid = Constant('SYS_setpgid',(0+ 57))
SYS_setpriority = Constant('SYS_setpriority',(0+ 97))
SYS_setregid = Constant('SYS_setregid',(0+ 71))
SYS_setregid32 = Constant('SYS_setregid32',(0+204))
SYS_setresgid = Constant('SYS_setresgid',(0+170))
SYS_setresgid32 = Constant('SYS_setresgid32',(0+210))
SYS_setresuid = Constant('SYS_setresuid',(0+164))
SYS_setresuid32 = Constant('SYS_setresuid32',(0+208))
SYS_setreuid = Constant('SYS_setreuid',(0+ 70))
SYS_setreuid32 = Constant('SYS_setreuid32',(0+203))
SYS_setrlimit = Constant('SYS_setrlimit',(0+ 75))
SYS_set_robust_list = Constant('SYS_set_robust_list',(0+338))
SYS_setsid = Constant('SYS_setsid',(0+ 66))
SYS_setsockopt = Constant('SYS_setsockopt',(0+294))
SYS_set_tid_address = Constant('SYS_set_tid_address',(0+256))
SYS_settimeofday = Constant('SYS_settimeofday',(0+ 79))
SYS_setuid = Constant('SYS_setuid',(0+ 23))
SYS_setuid32 = Constant('SYS_setuid32',(0+213))
SYS_setxattr = Constant('SYS_setxattr',(0+226))
SYS_shmat = Constant('SYS_shmat',(0+305))
SYS_shmctl = Constant('SYS_shmctl',(0+308))
SYS_shmdt = Constant('SYS_shmdt',(0+306))
SYS_shmget = Constant('SYS_shmget',(0+307))
SYS_shutdown = Constant('SYS_shutdown',(0+293))
SYS_sigaction = Constant('SYS_sigaction',(0+ 67))
SYS_sigaltstack = Constant('SYS_sigaltstack',(0+186))
SYS_signalfd = Constant('SYS_signalfd',(0+349))
SYS_sigpending = Constant('SYS_sigpending',(0+ 73))
SYS_sigprocmask = Constant('SYS_sigprocmask',(0+126))
SYS_sigreturn = Constant('SYS_sigreturn',(0+119))
SYS_sigsuspend = Constant('SYS_sigsuspend',(0+ 72))
SYS_socket = Constant('SYS_socket',(0+281))
SYS_socketcall = Constant('SYS_socketcall',(0+102))
SYS_socketpair = Constant('SYS_socketpair',(0+288))
SYS_splice = Constant('SYS_splice',(0+340))
SYS_stat = Constant('SYS_stat',(0+106))
SYS_stat64 = Constant('SYS_stat64',(0+195))
SYS_statfs = Constant('SYS_statfs',(0+ 99))
SYS_statfs64 = Constant('SYS_statfs64',(0+266))
SYS_stime = Constant('SYS_stime',(0+ 25))
SYS_swapoff = Constant('SYS_swapoff',(0+115))
SYS_swapon = Constant('SYS_swapon',(0+ 87))
SYS_symlink = Constant('SYS_symlink',(0+ 83))
SYS_symlinkat = Constant('SYS_symlinkat',(0+331))
SYS_sync = Constant('SYS_sync',(0+ 36))
SYS_syscall = Constant('SYS_syscall',(0+113))
SYS_SYSCALL_BASE = Constant('SYS_SYSCALL_BASE',0)
SYS__sysctl = Constant('SYS__sysctl',(0+149))
SYS_sysfs = Constant('SYS_sysfs',(0+135))
SYS_sysinfo = Constant('SYS_sysinfo',(0+116))
SYS_syslog = Constant('SYS_syslog',(0+103))
SYS_tee = Constant('SYS_tee',(0+342))
SYS_tgkill = Constant('SYS_tgkill',(0+268))
SYS_time = Constant('SYS_time',(0+ 13))
SYS_timer_create = Constant('SYS_timer_create',(0+257))
SYS_timer_delete = Constant('SYS_timer_delete',(0+261))
SYS_timerfd = Constant('SYS_timerfd',(0+350))
SYS_timerfd_gettime = Constant('SYS_timerfd_gettime',(0+354))
SYS_timerfd_settime = Constant('SYS_timerfd_settime',(0+353))
SYS_timer_getoverrun = Constant('SYS_timer_getoverrun',(0+260))
SYS_timer_gettime = Constant('SYS_timer_gettime',(0+259))
SYS_timer_settime = Constant('SYS_timer_settime',(0+258))
SYS_times = Constant('SYS_times',(0+ 43))
SYS_tkill = Constant('SYS_tkill',(0+238))
SYS_truncate = Constant('SYS_truncate',(0+ 92))
SYS_truncate64 = Constant('SYS_truncate64',(0+193))
SYS_ugetrlimit = Constant('SYS_ugetrlimit',(0+191))
SYS_umask = Constant('SYS_umask',(0+ 60))
SYS_umount = Constant('SYS_umount',(0+ 22))
SYS_umount2 = Constant('SYS_umount2',(0+ 52))
SYS_uname = Constant('SYS_uname',(0+122))
SYS_unlink = Constant('SYS_unlink',(0+ 10))
SYS_unlinkat = Constant('SYS_unlinkat',(0+328))
SYS_unshare = Constant('SYS_unshare',(0+337))
SYS_uselib = Constant('SYS_uselib',(0+ 86))
SYS_ustat = Constant('SYS_ustat',(0+ 62))
SYS_utime = Constant('SYS_utime',(0+ 30))
SYS_utimensat = Constant('SYS_utimensat',(0+348))
SYS_utimes = Constant('SYS_utimes',(0+269))
SYS_vfork = Constant('SYS_vfork',(0+190))
SYS_vhangup = Constant('SYS_vhangup',(0+111))
SYS_vmsplice = Constant('SYS_vmsplice',(0+343))
SYS_vserver = Constant('SYS_vserver',(0+313))
SYS_wait4 = Constant('SYS_wait4',(0+114))
SYS_waitid = Constant('SYS_waitid',(0+280))
SYS_write = Constant('SYS_write',(0+ 4))
SYS_writev = Constant('SYS_writev',(0+146))
|
arjunbm13/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/ruhd.py
|
149
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .common import InfoExtractor
class RUHDIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?ruhd\.ru/play\.php\?vid=(?P<id>\d+)'
_TEST = {
'url': 'http://www.ruhd.ru/play.php?vid=207',
'md5': 'd1a9ec4edf8598e3fbd92bb16072ba83',
'info_dict': {
'id': '207',
'ext': 'divx',
'title': 'КОТ бааааам',
'description': 'классный кот)',
'thumbnail': r're:^http://.*\.jpg$',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(
r'<param name="src" value="([^"]+)"', webpage, 'video url')
title = self._html_search_regex(
r'<title>([^<]+) RUHD.ru - Видео Высокого качества №1 в России!</title>',
webpage, 'title')
description = self._html_search_regex(
r'(?s)<div id="longdesc">(.+?)<span id="showlink">',
webpage, 'description', fatal=False)
thumbnail = self._html_search_regex(
r'<param name="previewImage" value="([^"]+)"',
webpage, 'thumbnail', fatal=False)
if thumbnail:
thumbnail = 'http://www.ruhd.ru' + thumbnail
return {
'id': video_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
}
|
Soolo-ss/Solo
|
refs/heads/master
|
Plugins/libevent-master/event_rpcgen.py
|
1
|
#!/usr/bin/env python
#
# Copyright (c) 2005-2007 Niels Provos <provos@citi.umich.edu>
# Copyright (c) 2007-2012 Niels Provos and Nick Mathewson
# All rights reserved.
#
# Generates marshaling code based on libevent.
# TODO:
# 1) use optparse to allow the strategy shell to parse options, and
# to allow the instantiated factory (for the specific output language)
# to parse remaining options
# 2) move the globals into a class that manages execution (including the
# progress outputs that spam stderr at the moment)
# 3) emit other languages
import sys
import re
_NAME = "event_rpcgen.py"
_VERSION = "0.1"
# Globals
line_count = 0
white = re.compile(r'\s+')
cppcomment = re.compile(r'\/\/.*$')
nonident = re.compile(r'[^a-zA-Z0-9_]')
structref = re.compile(r'^struct\[([a-zA-Z_][a-zA-Z0-9_]*)\]$')
structdef = re.compile(r'^struct +[a-zA-Z_][a-zA-Z0-9_]* *{$')
headerdirect = []
cppdirect = []
QUIETLY = 0
def declare(s):
if not QUIETLY:
print(s)
def TranslateList(mylist, mydict):
return [x % mydict for x in mylist]
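# For example, TranslateList(['%(name)s_set = 1;'], {'name': 'msg'})
# returns ['msg_set = 1;'].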
# Exception class for parse errors
class RpcGenError(Exception):
def __init__(self, why):
self.why = why
def __str__(self):
return str(self.why)
# Holds everything that makes a struct
class Struct:
def __init__(self, name):
self._name = name
self._entries = []
self._tags = {}
declare(' Created struct: %s' % name)
def AddEntry(self, entry):
if entry.Tag() in self._tags:
raise RpcGenError(
'Entry "%s" duplicates tag number %d from "%s" '
'around line %d' % (entry.Name(), entry.Tag(),
self._tags[entry.Tag()], line_count))
self._entries.append(entry)
self._tags[entry.Tag()] = entry.Name()
declare(' Added entry: %s' % entry.Name())
def Name(self):
return self._name
def EntryTagName(self, entry):
"""Creates the name inside an enumeration for distinguishing data
types."""
name = "%s_%s" % (self._name, entry.Name())
return name.upper()
def PrintIndented(self, file, ident, code):
"""Takes an array, add indentation to each entry and prints it."""
for entry in code:
file.write('%s%s\n' % (ident, entry))
class StructCCode(Struct):
""" Knows how to generate C code for a struct """
def __init__(self, name):
Struct.__init__(self, name)
def PrintTags(self, file):
"""Prints the tag definitions for a structure."""
file.write('/* Tag definition for %s */\n' % self._name)
file.write('enum %s_ {\n' % self._name.lower())
for entry in self._entries:
file.write(' %s=%d,\n' % (self.EntryTagName(entry), entry.Tag()))
file.write(' %s_MAX_TAGS\n' % (self._name.upper()))
file.write('};\n\n')
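    # For a struct named "msg" with a single entry "attack" tagged 1,
    # PrintTags emits roughly (exact indentation elided):
    #
    #     /* Tag definition for msg */
    #     enum msg_ {
    #       MSG_ATTACK=1,
    #       MSG_MAX_TAGS
    #     };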
def PrintForwardDeclaration(self, file):
file.write('struct %s;\n' % self._name)
def PrintDeclaration(self, file):
file.write('/* Structure declaration for %s */\n' % self._name)
file.write('struct %s_access_ {\n' % self._name)
for entry in self._entries:
dcl = entry.AssignDeclaration('(*%s_assign)' % entry.Name())
dcl.extend(
entry.GetDeclaration('(*%s_get)' % entry.Name()))
if entry.Array():
dcl.extend(
entry.AddDeclaration('(*%s_add)' % entry.Name()))
self.PrintIndented(file, ' ', dcl)
file.write('};\n\n')
file.write('struct %s {\n' % self._name)
file.write(' struct %s_access_ *base;\n\n' % self._name)
for entry in self._entries:
dcl = entry.Declaration()
self.PrintIndented(file, ' ', dcl)
file.write('\n')
for entry in self._entries:
file.write(' ev_uint8_t %s_set;\n' % entry.Name())
file.write('};\n\n')
file.write("""struct %(name)s *%(name)s_new(void);
struct %(name)s *%(name)s_new_with_arg(void *);
void %(name)s_free(struct %(name)s *);
void %(name)s_clear(struct %(name)s *);
void %(name)s_marshal(struct evbuffer *, const struct %(name)s *);
int %(name)s_unmarshal(struct %(name)s *, struct evbuffer *);
int %(name)s_complete(struct %(name)s *);
void evtag_marshal_%(name)s(struct evbuffer *, ev_uint32_t,
const struct %(name)s *);
int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t,
struct %(name)s *);\n""" % { 'name' : self._name })
# Write a setting function of every variable
for entry in self._entries:
self.PrintIndented(file, '', entry.AssignDeclaration(
entry.AssignFuncName()))
self.PrintIndented(file, '', entry.GetDeclaration(
entry.GetFuncName()))
if entry.Array():
self.PrintIndented(file, '', entry.AddDeclaration(
entry.AddFuncName()))
file.write('/* --- %s done --- */\n\n' % self._name)
def PrintCode(self, file):
file.write(('/*\n'
' * Implementation of %s\n'
' */\n\n') % self._name)
file.write('static struct %(name)s_access_ %(name)s_base__ = {\n' % \
{ 'name' : self._name })
for entry in self._entries:
self.PrintIndented(file, ' ', entry.CodeBase())
file.write('};\n\n')
# Creation
file.write((
'struct %(name)s *\n'
'%(name)s_new(void)\n'
'{\n'
' return %(name)s_new_with_arg(NULL);\n'
'}\n'
'\n'
'struct %(name)s *\n'
'%(name)s_new_with_arg(void *unused)\n'
'{\n'
' struct %(name)s *tmp;\n'
' if ((tmp = malloc(sizeof(struct %(name)s))) == NULL) {\n'
' event_warn("%%s: malloc", __func__);\n'
' return (NULL);\n'
' }\n'
' tmp->base = &%(name)s_base__;\n\n') % { 'name' : self._name })
for entry in self._entries:
self.PrintIndented(file, ' ', entry.CodeInitialize('tmp'))
file.write(' tmp->%s_set = 0;\n\n' % entry.Name())
file.write((
' return (tmp);\n'
'}\n\n'))
# Adding
for entry in self._entries:
if entry.Array():
self.PrintIndented(file, '', entry.CodeAdd())
file.write('\n')
# Assigning
for entry in self._entries:
self.PrintIndented(file, '', entry.CodeAssign())
file.write('\n')
# Getting
for entry in self._entries:
self.PrintIndented(file, '', entry.CodeGet())
file.write('\n')
# Clearing
file.write(( 'void\n'
'%(name)s_clear(struct %(name)s *tmp)\n'
'{'
'\n') % { 'name' : self._name })
for entry in self._entries:
self.PrintIndented(file, ' ', entry.CodeClear('tmp'))
file.write('}\n\n')
# Freeing
file.write(( 'void\n'
'%(name)s_free(struct %(name)s *tmp)\n'
'{'
'\n') % { 'name' : self._name })
for entry in self._entries:
self.PrintIndented(file, ' ', entry.CodeFree('tmp'))
file.write((' free(tmp);\n'
'}\n\n'))
# Marshaling
file.write(('void\n'
'%(name)s_marshal(struct evbuffer *evbuf, '
'const struct %(name)s *tmp)'
'{\n') % { 'name' : self._name })
for entry in self._entries:
indent = ' '
# Optional entries do not have to be set
if entry.Optional():
indent += ' '
file.write(' if (tmp->%s_set) {\n' % entry.Name())
self.PrintIndented(
file, indent,
entry.CodeMarshal('evbuf', self.EntryTagName(entry),
entry.GetVarName('tmp'),
entry.GetVarLen('tmp')))
if entry.Optional():
file.write(' }\n')
file.write('}\n\n')
# Unmarshaling
file.write(('int\n'
'%(name)s_unmarshal(struct %(name)s *tmp, '
' struct evbuffer *evbuf)\n'
'{\n'
' ev_uint32_t tag;\n'
' while (evbuffer_get_length(evbuf) > 0) {\n'
' if (evtag_peek(evbuf, &tag) == -1)\n'
' return (-1);\n'
' switch (tag) {\n'
'\n') % { 'name' : self._name })
for entry in self._entries:
file.write(' case %s:\n' % self.EntryTagName(entry))
if not entry.Array():
file.write((
' if (tmp->%s_set)\n'
' return (-1);'
'\n') % (entry.Name()))
self.PrintIndented(
file, ' ',
entry.CodeUnmarshal('evbuf',
self.EntryTagName(entry),
entry.GetVarName('tmp'),
entry.GetVarLen('tmp')))
file.write(( ' tmp->%s_set = 1;\n' % entry.Name() +
' break;\n' ))
file.write(( ' default:\n'
' return -1;\n'
' }\n'
' }\n\n' ))
# Check if it was decoded completely
file.write(( ' if (%(name)s_complete(tmp) == -1)\n'
' return (-1);'
'\n') % { 'name' : self._name })
# Successfully decoded
file.write(( ' return (0);\n'
'}\n\n'))
# Checking if a structure has all the required data
file.write((
'int\n'
'%(name)s_complete(struct %(name)s *msg)\n'
'{\n' ) % { 'name' : self._name })
for entry in self._entries:
if not entry.Optional():
code = [
'if (!msg->%(name)s_set)',
' return (-1);' ]
code = TranslateList(code, entry.GetTranslation())
self.PrintIndented(
file, ' ', code)
self.PrintIndented(
file, ' ',
entry.CodeComplete('msg', entry.GetVarName('msg')))
file.write((
' return (0);\n'
'}\n\n' ))
# Complete message unmarshaling
file.write((
'int\n'
'evtag_unmarshal_%(name)s(struct evbuffer *evbuf, '
'ev_uint32_t need_tag, struct %(name)s *msg)\n'
'{\n'
' ev_uint32_t tag;\n'
' int res = -1;\n'
'\n'
' struct evbuffer *tmp = evbuffer_new();\n'
'\n'
' if (evtag_unmarshal(evbuf, &tag, tmp) == -1'
' || tag != need_tag)\n'
' goto error;\n'
'\n'
' if (%(name)s_unmarshal(msg, tmp) == -1)\n'
' goto error;\n'
'\n'
' res = 0;\n'
'\n'
' error:\n'
' evbuffer_free(tmp);\n'
' return (res);\n'
'}\n\n' ) % { 'name' : self._name })
# Complete message marshaling
file.write((
'void\n'
'evtag_marshal_%(name)s(struct evbuffer *evbuf, ev_uint32_t tag, '
'const struct %(name)s *msg)\n'
'{\n'
' struct evbuffer *buf_ = evbuffer_new();\n'
' assert(buf_ != NULL);\n'
' %(name)s_marshal(buf_, msg);\n'
' evtag_marshal_buffer(evbuf, tag, buf_);\n'
' evbuffer_free(buf_);\n'
'}\n\n' ) % { 'name' : self._name })
class Entry:
def __init__(self, type, name, tag):
self._type = type
self._name = name
self._tag = int(tag)
self._ctype = type
self._optional = 0
self._can_be_array = 0
self._array = 0
self._line_count = -1
self._struct = None
self._refname = None
self._optpointer = True
self._optaddarg = True
def GetInitializer(self):
assert 0, "Entry does not provide initializer"
def SetStruct(self, struct):
self._struct = struct
def LineCount(self):
assert self._line_count != -1
return self._line_count
def SetLineCount(self, number):
self._line_count = number
def Array(self):
return self._array
def Optional(self):
return self._optional
def Tag(self):
return self._tag
def Name(self):
return self._name
def Type(self):
return self._type
def MakeArray(self, yes=1):
self._array = yes
def MakeOptional(self):
self._optional = 1
def Verify(self):
if self.Array() and not self._can_be_array:
raise RpcGenError(
'Entry "%s" cannot be created as an array '
'around line %d' % (self._name, self.LineCount()))
if not self._struct:
raise RpcGenError(
'Entry "%s" does not know which struct it belongs to '
'around line %d' % (self._name, self.LineCount()))
if self._optional and self._array:
raise RpcGenError(
'Entry "%s" has illegal combination of optional and array '
'around line %d' % (self._name, self.LineCount()))
def GetTranslation(self, extradict = {}):
mapping = {
"parent_name" : self._struct.Name(),
"name" : self._name,
"ctype" : self._ctype,
"refname" : self._refname,
"optpointer" : self._optpointer and "*" or "",
"optreference" : self._optpointer and "&" or "",
"optaddarg" :
self._optaddarg and ", const %s value" % self._ctype or ""
}
for (k, v) in list(extradict.items()):
mapping[k] = v
return mapping
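    # For an entry "attack" of ctype ev_uint32_t inside struct "msg", the
    # resulting mapping is roughly:
    #     {'parent_name': 'msg', 'name': 'attack', 'ctype': 'ev_uint32_t',
    #      'refname': None, 'optpointer': '*', 'optreference': '&',
    #      'optaddarg': ', const ev_uint32_t value'}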
def GetVarName(self, var):
return '%(var)s->%(name)s_data' % self.GetTranslation({ 'var' : var })
def GetVarLen(self, var):
return 'sizeof(%s)' % self._ctype
def GetFuncName(self):
return '%s_%s_get' % (self._struct.Name(), self._name)
def GetDeclaration(self, funcname):
code = [ 'int %s(struct %s *, %s *);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def CodeGet(self):
code = (
'int',
'%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, '
'%(ctype)s *value)',
'{',
' if (msg->%(name)s_set != 1)',
' return (-1);',
' *value = msg->%(name)s_data;',
' return (0);',
'}' )
code = '\n'.join(code)
code = code % self.GetTranslation()
return code.split('\n')
def AssignFuncName(self):
return '%s_%s_assign' % (self._struct.Name(), self._name)
def AddFuncName(self):
return '%s_%s_add' % (self._struct.Name(), self._name)
def AssignDeclaration(self, funcname):
code = [ 'int %s(struct %s *, const %s);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def CodeAssign(self):
code = [ 'int',
'%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,'
' const %(ctype)s value)',
'{',
' msg->%(name)s_set = 1;',
' msg->%(name)s_data = value;',
' return (0);',
'}' ]
code = '\n'.join(code)
code = code % self.GetTranslation()
return code.split('\n')
def CodeClear(self, structname):
code = [ '%s->%s_set = 0;' % (structname, self.Name()) ]
return code
def CodeComplete(self, structname, var_name):
return []
def CodeFree(self, name):
return []
def CodeBase(self):
code = [
'%(parent_name)s_%(name)s_assign,',
'%(parent_name)s_%(name)s_get,'
]
if self.Array():
code.append('%(parent_name)s_%(name)s_add,')
code = '\n'.join(code)
code = code % self.GetTranslation()
return code.split('\n')
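    # e.g. for a non-array entry "attack" in struct "msg", CodeBase yields
    # ['msg_attack_assign,', 'msg_attack_get,'].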
class EntryBytes(Entry):
def __init__(self, type, name, tag, length):
# Init base class
Entry.__init__(self, type, name, tag)
self._length = length
self._ctype = 'ev_uint8_t'
def GetInitializer(self):
return "NULL"
def GetVarLen(self, var):
return '(%s)' % self._length
def CodeArrayAdd(self, varname, value):
# XXX: copy here
return [ '%(varname)s = NULL;' % { 'varname' : varname } ]
def GetDeclaration(self, funcname):
code = [ 'int %s(struct %s *, %s **);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def AssignDeclaration(self, funcname):
code = [ 'int %s(struct %s *, const %s *);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def Declaration(self):
dcl = ['ev_uint8_t %s_data[%s];' % (self._name, self._length)]
return dcl
def CodeGet(self):
name = self._name
code = [ 'int',
'%s_%s_get(struct %s *msg, %s **value)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' if (msg->%s_set != 1)' % name,
' return (-1);',
' *value = msg->%s_data;' % name,
' return (0);',
'}' ]
return code
def CodeAssign(self):
name = self._name
code = [ 'int',
'%s_%s_assign(struct %s *msg, const %s *value)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' msg->%s_set = 1;' % name,
' memcpy(msg->%s_data, value, %s);' % (
name, self._length),
' return (0);',
'}' ]
return code
def CodeUnmarshal(self, buf, tag_name, var_name, var_len):
code = [ 'if (evtag_unmarshal_fixed(%(buf)s, %(tag)s, '
'%(var)s, %(varlen)s) == -1) {',
' event_warnx("%%s: failed to unmarshal %(name)s", __func__);',
' return (-1);',
'}'
]
return TranslateList(code,
self.GetTranslation({
'var' : var_name,
'varlen' : var_len,
'buf' : buf,
'tag' : tag_name }))
def CodeMarshal(self, buf, tag_name, var_name, var_len):
code = ['evtag_marshal(%s, %s, %s, %s);' % (
buf, tag_name, var_name, var_len)]
return code
def CodeClear(self, structname):
code = [ '%s->%s_set = 0;' % (structname, self.Name()),
'memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
structname, self._name, structname, self._name)]
return code
def CodeInitialize(self, name):
code = ['memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
name, self._name, name, self._name)]
return code
def Verify(self):
if not self._length:
raise RpcGenError(
'Entry "%s" needs a length '
'around line %d' % (self._name, self.LineCount()))
Entry.Verify(self)
class EntryInt(Entry):
def __init__(self, type, name, tag, bits=32):
# Init base class
Entry.__init__(self, type, name, tag)
self._can_be_array = 1
if bits == 32:
    self._ctype = 'ev_uint32_t'
    self._marshal_type = 'int'
elif bits == 64:
    self._ctype = 'ev_uint64_t'
    self._marshal_type = 'int64'
def GetInitializer(self):
return "0"
def CodeArrayFree(self, var):
return []
def CodeArrayAssign(self, varname, srcvar):
return [ '%(varname)s = %(srcvar)s;' % { 'varname' : varname,
'srcvar' : srcvar } ]
def CodeArrayAdd(self, varname, value):
"""Returns a new entry of this type."""
return [ '%(varname)s = %(value)s;' % { 'varname' : varname,
'value' : value } ]
def CodeUnmarshal(self, buf, tag_name, var_name, var_len):
code = [
'if (evtag_unmarshal_%(ma)s(%(buf)s, %(tag)s, &%(var)s) == -1) {',
' event_warnx("%%s: failed to unmarshal %(name)s", __func__);',
' return (-1);',
'}' ]
code = '\n'.join(code) % self.GetTranslation({
'ma' : self._marshal_type,
'buf' : buf,
'tag' : tag_name,
'var' : var_name })
return code.split('\n')
def CodeMarshal(self, buf, tag_name, var_name, var_len):
code = [
'evtag_marshal_%s(%s, %s, %s);' % (
self._marshal_type, buf, tag_name, var_name)]
return code
def Declaration(self):
dcl = ['%s %s_data;' % (self._ctype, self._name)]
return dcl
def CodeInitialize(self, name):
code = ['%s->%s_data = 0;' % (name, self._name)]
return code
class EntryString(Entry):
def __init__(self, type, name, tag):
# Init base class
Entry.__init__(self, type, name, tag)
self._can_be_array = 1
self._ctype = 'char *'
def GetInitializer(self):
return "NULL"
def CodeArrayFree(self, varname):
code = [
'if (%(var)s != NULL) free(%(var)s);' ]
return TranslateList(code, { 'var' : varname })
def CodeArrayAssign(self, varname, srcvar):
code = [
'if (%(var)s != NULL)',
' free(%(var)s);',
'%(var)s = strdup(%(srcvar)s);',
'if (%(var)s == NULL) {',
' event_warnx("%%s: strdup", __func__);',
' return (-1);',
'}' ]
return TranslateList(code, { 'var' : varname,
'srcvar' : srcvar })
def CodeArrayAdd(self, varname, value):
code = [
'if (%(value)s != NULL) {',
' %(var)s = strdup(%(value)s);',
' if (%(var)s == NULL) {',
' goto error;',
' }',
'} else {',
' %(var)s = NULL;',
'}' ]
return TranslateList(code, { 'var' : varname,
'value' : value })
def GetVarLen(self, var):
return 'strlen(%s)' % self.GetVarName(var)
def CodeMakeInitalize(self, varname):
return '%(varname)s = NULL;' % { 'varname' : varname }
def CodeAssign(self):
name = self._name
code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,
const %(ctype)s value)
{
if (msg->%(name)s_data != NULL)
free(msg->%(name)s_data);
if ((msg->%(name)s_data = strdup(value)) == NULL)
return (-1);
msg->%(name)s_set = 1;
return (0);
}""" % self.GetTranslation()
return code.split('\n')
def CodeUnmarshal(self, buf, tag_name, var_name, var_len):
code = ['if (evtag_unmarshal_string(%(buf)s, %(tag)s, &%(var)s) == -1) {',
' event_warnx("%%s: failed to unmarshal %(name)s", __func__);',
' return (-1);',
'}'
]
code = '\n'.join(code) % self.GetTranslation({
'buf' : buf,
'tag' : tag_name,
'var' : var_name })
return code.split('\n')
def CodeMarshal(self, buf, tag_name, var_name, var_len):
code = ['evtag_marshal_string(%s, %s, %s);' % (
buf, tag_name, var_name)]
return code
def CodeClear(self, structname):
code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
' free(%s->%s_data);' % (structname, self.Name()),
' %s->%s_data = NULL;' % (structname, self.Name()),
' %s->%s_set = 0;' % (structname, self.Name()),
'}'
]
return code
def CodeInitialize(self, name):
code = ['%s->%s_data = NULL;' % (name, self._name)]
return code
def CodeFree(self, name):
code = ['if (%s->%s_data != NULL)' % (name, self._name),
' free (%s->%s_data);' % (name, self._name)]
return code
def Declaration(self):
dcl = ['char *%s_data;' % self._name]
return dcl
class EntryStruct(Entry):
def __init__(self, type, name, tag, refname):
# Init base class
Entry.__init__(self, type, name, tag)
self._optpointer = False
self._can_be_array = 1
self._refname = refname
self._ctype = 'struct %s*' % refname
self._optaddarg = False
def GetInitializer(self):
return "NULL"
def GetVarLen(self, var):
return '-1'
def CodeArrayAdd(self, varname, value):
code = [
'%(varname)s = %(refname)s_new();',
'if (%(varname)s == NULL)',
' goto error;' ]
return TranslateList(code, self.GetTranslation({ 'varname' : varname }))
def CodeArrayFree(self, var):
code = [ '%(refname)s_free(%(var)s);' % self.GetTranslation(
{ 'var' : var }) ]
return code
def CodeArrayAssign(self, var, srcvar):
code = [
'int had_error = 0;',
'struct evbuffer *tmp = NULL;',
'%(refname)s_clear(%(var)s);',
'if ((tmp = evbuffer_new()) == NULL) {',
' event_warn("%%s: evbuffer_new()", __func__);',
' had_error = 1;',
' goto done;',
'}',
'%(refname)s_marshal(tmp, %(srcvar)s);',
'if (%(refname)s_unmarshal(%(var)s, tmp) == -1) {',
' event_warnx("%%s: %(refname)s_unmarshal", __func__);',
' had_error = 1;',
' goto done;',
'}',
'done:',
'if (tmp != NULL)',
' evbuffer_free(tmp);',
'if (had_error) {',
' %(refname)s_clear(%(var)s);',
' return (-1);',
'}' ]
return TranslateList(code, self.GetTranslation({
'var' : var,
'srcvar' : srcvar}))
def CodeGet(self):
name = self._name
code = [ 'int',
'%s_%s_get(struct %s *msg, %s *value)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' if (msg->%s_set != 1) {' % name,
' msg->%s_data = %s_new();' % (name, self._refname),
' if (msg->%s_data == NULL)' % name,
' return (-1);',
' msg->%s_set = 1;' % name,
' }',
' *value = msg->%s_data;' % name,
' return (0);',
'}' ]
return code
def CodeAssign(self):
name = self._name
code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,
const %(ctype)s value)
{
struct evbuffer *tmp = NULL;
if (msg->%(name)s_set) {
%(refname)s_clear(msg->%(name)s_data);
msg->%(name)s_set = 0;
} else {
msg->%(name)s_data = %(refname)s_new();
if (msg->%(name)s_data == NULL) {
event_warn("%%s: %(refname)s_new()", __func__);
goto error;
}
}
if ((tmp = evbuffer_new()) == NULL) {
event_warn("%%s: evbuffer_new()", __func__);
goto error;
}
%(refname)s_marshal(tmp, value);
if (%(refname)s_unmarshal(msg->%(name)s_data, tmp) == -1) {
event_warnx("%%s: %(refname)s_unmarshal", __func__);
goto error;
}
msg->%(name)s_set = 1;
evbuffer_free(tmp);
return (0);
error:
if (tmp != NULL)
evbuffer_free(tmp);
if (msg->%(name)s_data != NULL) {
%(refname)s_free(msg->%(name)s_data);
msg->%(name)s_data = NULL;
}
return (-1);
}""" % self.GetTranslation()
return code.split('\n')
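# Design note: the assign code above deep-copies the embedded struct by
# marshaling the source into a temporary evbuffer and unmarshaling it into
# the destination, reusing the generated (de)serializers instead of doing a
# field-by-field copy.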
def CodeComplete(self, structname, var_name):
code = [ 'if (%(structname)s->%(name)s_set && '
'%(refname)s_complete(%(var)s) == -1)',
' return (-1);' ]
return TranslateList(code, self.GetTranslation({
'structname' : structname,
'var' : var_name }))
def CodeUnmarshal(self, buf, tag_name, var_name, var_len):
code = ['%(var)s = %(refname)s_new();',
'if (%(var)s == NULL)',
' return (-1);',
'if (evtag_unmarshal_%(refname)s(%(buf)s, %(tag)s, '
'%(var)s) == -1) {',
' event_warnx("%%s: failed to unmarshal %(name)s", __func__);',
' return (-1);',
'}'
]
code = '\n'.join(code) % self.GetTranslation({
'buf' : buf,
'tag' : tag_name,
'var' : var_name })
return code.split('\n')
def CodeMarshal(self, buf, tag_name, var_name, var_len):
code = ['evtag_marshal_%s(%s, %s, %s);' % (
self._refname, buf, tag_name, var_name)]
return code
def CodeClear(self, structname):
code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
' %s_free(%s->%s_data);' % (
self._refname, structname, self.Name()),
' %s->%s_data = NULL;' % (structname, self.Name()),
' %s->%s_set = 0;' % (structname, self.Name()),
'}'
]
return code
def CodeInitialize(self, name):
code = ['%s->%s_data = NULL;' % (name, self._name)]
return code
def CodeFree(self, name):
code = ['if (%s->%s_data != NULL)' % (name, self._name),
' %s_free(%s->%s_data);' % (
self._refname, name, self._name)]
return code
def Declaration(self):
dcl = ['%s %s_data;' % (self._ctype, self._name)]
return dcl
class EntryVarBytes(Entry):
def __init__(self, type, name, tag):
# Init base class
Entry.__init__(self, type, name, tag)
self._ctype = 'ev_uint8_t *'
def GetInitializer(self):
return "NULL"
def GetVarLen(self, var):
return '%(var)s->%(name)s_length' % self.GetTranslation({ 'var' : var })
def CodeArrayAdd(self, varname, value):
# xxx: copy
return [ '%(varname)s = NULL;' % { 'varname' : varname } ]
def GetDeclaration(self, funcname):
code = [ 'int %s(struct %s *, %s *, ev_uint32_t *);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def AssignDeclaration(self, funcname):
code = [ 'int %s(struct %s *, const %s, ev_uint32_t);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def CodeAssign(self):
name = self._name
code = [ 'int',
'%s_%s_assign(struct %s *msg, '
'const %s value, ev_uint32_t len)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' if (msg->%s_data != NULL)' % name,
' free (msg->%s_data);' % name,
' msg->%s_data = malloc(len);' % name,
' if (msg->%s_data == NULL)' % name,
' return (-1);',
' msg->%s_set = 1;' % name,
' msg->%s_length = len;' % name,
' memcpy(msg->%s_data, value, len);' % name,
' return (0);',
'}' ]
return code
def CodeGet(self):
name = self._name
code = [ 'int',
'%s_%s_get(struct %s *msg, %s *value, ev_uint32_t *plen)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' if (msg->%s_set != 1)' % name,
' return (-1);',
' *value = msg->%s_data;' % name,
' *plen = msg->%s_length;' % name,
' return (0);',
'}' ]
return code
def CodeUnmarshal(self, buf, tag_name, var_name, var_len):
code = ['if (evtag_payload_length(%(buf)s, &%(varlen)s) == -1)',
' return (-1);',
# We do not want DoS opportunities
'if (%(varlen)s > evbuffer_get_length(%(buf)s))',
' return (-1);',
'if ((%(var)s = malloc(%(varlen)s)) == NULL)',
' return (-1);',
'if (evtag_unmarshal_fixed(%(buf)s, %(tag)s, %(var)s, '
'%(varlen)s) == -1) {',
' event_warnx("%%s: failed to unmarshal %(name)s", __func__);',
' return (-1);',
'}'
]
code = '\n'.join(code) % self.GetTranslation({
'buf' : buf,
'tag' : tag_name,
'var' : var_name,
'varlen' : var_len })
return code.split('\n')
def CodeMarshal(self, buf, tag_name, var_name, var_len):
code = ['evtag_marshal(%s, %s, %s, %s);' % (
buf, tag_name, var_name, var_len)]
return code
def CodeClear(self, structname):
code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
' free (%s->%s_data);' % (structname, self.Name()),
' %s->%s_data = NULL;' % (structname, self.Name()),
' %s->%s_length = 0;' % (structname, self.Name()),
' %s->%s_set = 0;' % (structname, self.Name()),
'}'
]
return code
def CodeInitialize(self, name):
code = ['%s->%s_data = NULL;' % (name, self._name),
'%s->%s_length = 0;' % (name, self._name) ]
return code
def CodeFree(self, name):
code = ['if (%s->%s_data != NULL)' % (name, self._name),
' free(%s->%s_data);' % (name, self._name)]
return code
def Declaration(self):
dcl = ['ev_uint8_t *%s_data;' % self._name,
'ev_uint32_t %s_length;' % self._name]
return dcl
class EntryArray(Entry):
def __init__(self, entry):
# Init base class
Entry.__init__(self, entry._type, entry._name, entry._tag)
self._entry = entry
self._refname = entry._refname
self._ctype = self._entry._ctype
self._optional = True
self._optpointer = self._entry._optpointer
self._optaddarg = self._entry._optaddarg
# provide a new function for accessing the variable name
def GetVarName(var_name):
return '%(var)s->%(name)s_data[%(index)s]' % \
self._entry.GetTranslation({'var' : var_name,
'index' : self._index})
self._entry.GetVarName = GetVarName
def GetInitializer(self):
return "NULL"
def GetVarName(self, var_name):
return var_name
def GetVarLen(self, var_name):
return '-1'
def GetDeclaration(self, funcname):
"""Allows direct access to elements of the array."""
code = [
'int %(funcname)s(struct %(parent_name)s *, int, %(ctype)s *);' %
self.GetTranslation({ 'funcname' : funcname }) ]
return code
def AssignDeclaration(self, funcname):
code = [ 'int %s(struct %s *, int, const %s);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def AddDeclaration(self, funcname):
code = [
'%(ctype)s %(optpointer)s '
'%(funcname)s(struct %(parent_name)s *msg%(optaddarg)s);' % \
self.GetTranslation({ 'funcname' : funcname }) ]
return code
def CodeGet(self):
code = """int
%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, int offset,
%(ctype)s *value)
{
if (!msg->%(name)s_set || offset < 0 || offset >= msg->%(name)s_length)
return (-1);
*value = msg->%(name)s_data[offset];
return (0);
}""" % self.GetTranslation()
return code.split('\n')
def CodeAssign(self):
code = [
'int',
'%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg, int off,',
' const %(ctype)s value)',
'{',
' if (!msg->%(name)s_set || off < 0 || off >= msg->%(name)s_length)',
' return (-1);\n',
' {' ]
code = TranslateList(code, self.GetTranslation())
codearrayassign = self._entry.CodeArrayAssign(
'msg->%(name)s_data[off]' % self.GetTranslation(), 'value')
code += [' ' + x for x in codearrayassign]
code += TranslateList([
' }',
' return (0);',
'}' ], self.GetTranslation())
return code
def CodeAdd(self):
codearrayadd = self._entry.CodeArrayAdd(
'msg->%(name)s_data[msg->%(name)s_length - 1]' % self.GetTranslation(),
'value')
code = [
'static int',
'%(parent_name)s_%(name)s_expand_to_hold_more('
'struct %(parent_name)s *msg)',
'{',
' int tobe_allocated = msg->%(name)s_num_allocated;',
' %(ctype)s* new_data = NULL;',
' tobe_allocated = !tobe_allocated ? 1 : tobe_allocated << 1;',
' new_data = (%(ctype)s*) realloc(msg->%(name)s_data,',
' tobe_allocated * sizeof(%(ctype)s));',
' if (new_data == NULL)',
' return -1;',
' msg->%(name)s_data = new_data;',
' msg->%(name)s_num_allocated = tobe_allocated;',
' return 0;',
'}',
'',
'%(ctype)s %(optpointer)s',
'%(parent_name)s_%(name)s_add('
'struct %(parent_name)s *msg%(optaddarg)s)',
'{',
' if (++msg->%(name)s_length >= msg->%(name)s_num_allocated) {',
' if (%(parent_name)s_%(name)s_expand_to_hold_more(msg)<0)',
' goto error;',
' }' ]
code = TranslateList(code, self.GetTranslation())
code += [' ' + x for x in codearrayadd]
code += TranslateList([
' msg->%(name)s_set = 1;',
' return %(optreference)s(msg->%(name)s_data['
'msg->%(name)s_length - 1]);',
'error:',
' --msg->%(name)s_length;',
' return (NULL);',
'}' ], self.GetTranslation())
return code
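# Design note: the generated _expand_to_hold_more helper doubles the
# allocation (starting from one slot), so repeated _add calls append in
# amortized constant time; on failure the length increment is undone at the
# "error:" label.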
def CodeComplete(self, structname, var_name):
self._index = 'i'
tmp = self._entry.CodeComplete(structname, self._entry.GetVarName(var_name))
# skip the whole loop if there is nothing to check
if not tmp:
return []
translate = self.GetTranslation({ 'structname' : structname })
code = [
'{',
' int i;',
' for (i = 0; i < %(structname)s->%(name)s_length; ++i) {' ]
code = TranslateList(code, translate)
code += [' ' + x for x in tmp]
code += [
' }',
'}' ]
return code
def CodeUnmarshal(self, buf, tag_name, var_name, var_len):
translate = self.GetTranslation({ 'var' : var_name,
'buf' : buf,
'tag' : tag_name,
'init' : self._entry.GetInitializer()})
code = [
'if (%(var)s->%(name)s_length >= %(var)s->%(name)s_num_allocated &&',
' %(parent_name)s_%(name)s_expand_to_hold_more(%(var)s) < 0) {',
' puts("HEY NOW");',
' return (-1);',
'}']
# the unmarshal code directly returns
code = TranslateList(code, translate)
self._index = '%(var)s->%(name)s_length' % translate
code += self._entry.CodeUnmarshal(buf, tag_name,
self._entry.GetVarName(var_name),
self._entry.GetVarLen(var_name))
code += [ '++%(var)s->%(name)s_length;' % translate ]
return code
def CodeMarshal(self, buf, tag_name, var_name, var_len):
code = ['{',
' int i;',
' for (i = 0; i < %(var)s->%(name)s_length; ++i) {' ]
self._index = 'i'
code += self._entry.CodeMarshal(buf, tag_name,
self._entry.GetVarName(var_name),
self._entry.GetVarLen(var_name))
code += [' }',
'}'
]
code = "\n".join(code) % self.GetTranslation({ 'var' : var_name })
return code.split('\n')
def CodeClear(self, structname):
translate = self.GetTranslation({ 'structname' : structname })
codearrayfree = self._entry.CodeArrayFree(
'%(structname)s->%(name)s_data[i]' % self.GetTranslation(
{ 'structname' : structname } ))
code = [ 'if (%(structname)s->%(name)s_set == 1) {' ]
if codearrayfree:
code += [
' int i;',
' for (i = 0; i < %(structname)s->%(name)s_length; ++i) {' ]
code = TranslateList(code, translate)
if codearrayfree:
code += [' ' + x for x in codearrayfree]
code += [
' }' ]
code += TranslateList([
' free(%(structname)s->%(name)s_data);',
' %(structname)s->%(name)s_data = NULL;',
' %(structname)s->%(name)s_set = 0;',
' %(structname)s->%(name)s_length = 0;',
' %(structname)s->%(name)s_num_allocated = 0;',
'}'
], translate)
return code
def CodeInitialize(self, name):
code = ['%s->%s_data = NULL;' % (name, self._name),
'%s->%s_length = 0;' % (name, self._name),
'%s->%s_num_allocated = 0;' % (name, self._name)]
return code
def CodeFree(self, structname):
code = self.CodeClear(structname)
code += TranslateList([
'free(%(structname)s->%(name)s_data);' ],
self.GetTranslation({'structname' : structname }))
return code
def Declaration(self):
dcl = ['%s *%s_data;' % (self._ctype, self._name),
'int %s_length;' % self._name,
'int %s_num_allocated;' % self._name ]
return dcl
def NormalizeLine(line):
global white
global cppcomment
line = cppcomment.sub('', line)
line = line.strip()
line = white.sub(' ', line)
return line
def ProcessOneEntry(factory, newstruct, entry):
optional = 0
array = 0
entry_type = ''
name = ''
tag = ''
tag_set = None
separator = ''
fixed_length = ''
tokens = entry.split(' ')
while tokens:
token = tokens[0]
tokens = tokens[1:]
if not entry_type:
if not optional and token == 'optional':
optional = 1
continue
if not array and token == 'array':
array = 1
continue
if not entry_type:
entry_type = token
continue
if not name:
res = re.match(r'^([^\[\]]+)(\[.*\])?$', token)
if not res:
raise RpcGenError(
'Cannot parse name: \"%s\" '
'around line %d' % (entry, line_count))
name = res.group(1)
fixed_length = res.group(2)
if fixed_length:
fixed_length = fixed_length[1:-1]
continue
if not separator:
separator = token
if separator != '=':
raise RpcGenError('Expected "=" after name \"%s\" got %s'
% (name, token))
continue
if not tag_set:
tag_set = 1
if not re.match(r'^(0x)?[0-9]+$', token):
raise RpcGenError('Expected tag number: \"%s\"' % entry)
tag = int(token, 0)
continue
raise RpcGenError('Cannot parse \"%s\"' % entry)
if not tag_set:
raise RpcGenError('Need tag number: \"%s\"' % entry)
# Create the right entry
if entry_type == 'bytes':
if fixed_length:
newentry = factory.EntryBytes(entry_type, name, tag, fixed_length)
else:
newentry = factory.EntryVarBytes(entry_type, name, tag)
elif entry_type == 'int' and not fixed_length:
newentry = factory.EntryInt(entry_type, name, tag)
elif entry_type == 'int64' and not fixed_length:
newentry = factory.EntryInt(entry_type, name, tag, bits=64)
elif entry_type == 'string' and not fixed_length:
newentry = factory.EntryString(entry_type, name, tag)
else:
res = structref.match(entry_type)
if res:
# References another struct defined in our file
newentry = factory.EntryStruct(entry_type, name, tag, res.group(1))
else:
raise RpcGenError('Bad type: "%s" in "%s"' % (entry_type, entry))
structs = []
if optional:
newentry.MakeOptional()
if array:
newentry.MakeArray()
newentry.SetStruct(newstruct)
newentry.SetLineCount(line_count)
newentry.Verify()
if array:
# We need to encapsulate this entry into a struct
newname = newentry.Name() + '_array'
# Now borgify the new entry.
newentry = factory.EntryArray(newentry)
newentry.SetStruct(newstruct)
newentry.SetLineCount(line_count)
newentry.MakeArray()
newstruct.AddEntry(newentry)
return structs
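# Illustrative entry forms this parser accepts (field names are made up;
# struct[...] assumes the structref pattern defined earlier in this file):
#   optional int attack = 1;    -> EntryInt, MakeOptional()
#   array string tags = 2;      -> EntryString wrapped in an EntryArray
#   bytes digest[16] = 3;       -> fixed-length EntryBytes
#   struct[msg] child = 4;      -> EntryStruct referencing struct "msg"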
def ProcessStruct(factory, data):
tokens = data.split(' ')
# First three tokens are: 'struct' 'name' '{'
newstruct = factory.Struct(tokens[1])
inside = ' '.join(tokens[3:-1])
tokens = inside.split(';')
structs = []
for entry in tokens:
entry = NormalizeLine(entry)
if not entry:
continue
# It's possible that new structs get defined in here
structs.extend(ProcessOneEntry(factory, newstruct, entry))
structs.append(newstruct)
return structs
def GetNextStruct(file):
global line_count
global cppdirect
got_struct = 0
processed_lines = []
have_c_comment = 0
data = ''
while 1:
line = file.readline()
if not line:
break
line_count += 1
line = line[:-1]
if not have_c_comment and re.search(r'/\*', line):
if re.search(r'/\*.*?\*/', line):
line = re.sub(r'/\*.*?\*/', '', line)
else:
line = re.sub(r'/\*.*$', '', line)
have_c_comment = 1
if have_c_comment:
if not re.search(r'\*/', line):
continue
have_c_comment = 0
line = re.sub(r'^.*\*/', '', line)
line = NormalizeLine(line)
if not line:
continue
if not got_struct:
if re.match(r'#include ["<].*[>"]', line):
cppdirect.append(line)
continue
if re.match(r'^#(if( |def)|endif)', line):
cppdirect.append(line)
continue
if re.match(r'^#define', line):
headerdirect.append(line)
continue
if not structdef.match(line):
raise RpcGenError('Missing struct on line %d: %s'
% (line_count, line))
else:
got_struct = 1
data += line
continue
# We are inside the struct
tokens = line.split('}')
if len(tokens) == 1:
data += ' ' + line
continue
if len(tokens[1]):
raise RpcGenError('Trailing garbage after struct on line %d'
% line_count)
# We found the end of the struct
data += ' %s}' % tokens[0]
break
# Remove any comments that might be in there
data = re.sub(r'/\*.*\*/', '', data)
return data
def Parse(factory, file):
"""
Parses the input file and returns C code and corresponding header file.
"""
entities = []
while 1:
# Just gets the whole struct nicely formatted
data = GetNextStruct(file)
if not data:
break
entities.extend(ProcessStruct(factory, data))
return entities
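# A minimal input file the functions above accept (illustrative):
#   struct msg {
#       string from_name = 1;
#       optional int attack = 2;
#       array struct[msg] child = 3;
#   }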
class CCodeGenerator:
def __init__(self):
pass
def GuardName(self, name):
# Use the complete provided path to the input file, with all
# non-identifier characters replaced with underscores, to
# reduce the chance of a collision between guard macros.
return 'EVENT_RPCOUT_' + nonident.sub('_', name).upper() + '_'
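# e.g. GuardName('../include/test.rpc') might yield
# 'EVENT_RPCOUT____INCLUDE_TEST_RPC_' (illustrative; the exact result
# depends on the nonident pattern defined earlier in this file).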
def HeaderPreamble(self, name):
guard = self.GuardName(name)
pre = (
'/*\n'
' * Automatically generated from %s\n'
' */\n\n'
'#ifndef %s\n'
'#define %s\n\n' ) % (
name, guard, guard)
for statement in headerdirect:
pre += '%s\n' % statement
if headerdirect:
pre += '\n'
pre += (
'#include <event2/util.h> /* for ev_uint*_t */\n'
'#include <event2/rpc.h>\n'
)
return pre
def HeaderPostamble(self, name):
guard = self.GuardName(name)
return '#endif /* %s */' % guard
def BodyPreamble(self, name, header_file):
global _NAME
global _VERSION
slash = header_file.rfind('/')
if slash != -1:
header_file = header_file[slash+1:]
pre = ( '/*\n'
' * Automatically generated from %s\n'
' * by %s/%s. DO NOT EDIT THIS FILE.\n'
' */\n\n' ) % (name, _NAME, _VERSION)
pre += ( '#include <stdlib.h>\n'
'#include <string.h>\n'
'#include <assert.h>\n'
'#include <event2/event-config.h>\n'
'#include <event2/event.h>\n'
'#include <event2/buffer.h>\n'
'#include <event2/tag.h>\n\n'
'#if defined(EVENT____func__) && !defined(__func__)\n'
'#define __func__ EVENT____func__\n'
'#endif\n\n'
)
for statement in cppdirect:
pre += '%s\n' % statement
pre += '\n#include "%s"\n\n' % header_file
pre += 'void event_warn(const char *fmt, ...);\n'
pre += 'void event_warnx(const char *fmt, ...);\n\n'
return pre
def HeaderFilename(self, filename):
return '.'.join(filename.split('.')[:-1]) + '.h'
def CodeFilename(self, filename):
return '.'.join(filename.split('.')[:-1]) + '.gen.c'
def Struct(self, name):
return StructCCode(name)
def EntryBytes(self, entry_type, name, tag, fixed_length):
return EntryBytes(entry_type, name, tag, fixed_length)
def EntryVarBytes(self, entry_type, name, tag):
return EntryVarBytes(entry_type, name, tag)
def EntryInt(self, entry_type, name, tag, bits=32):
return EntryInt(entry_type, name, tag, bits)
def EntryString(self, entry_type, name, tag):
return EntryString(entry_type, name, tag)
def EntryStruct(self, entry_type, name, tag, struct_name):
return EntryStruct(entry_type, name, tag, struct_name)
def EntryArray(self, entry):
return EntryArray(entry)
class Usage(RpcGenError):
def __init__(self, argv0):
RpcGenError.__init__("usage: %s input.rpc [[output.h] output.c]"
% argv0)
class CommandLine:
def __init__(self, argv):
"""Initialize a command-line to launch event_rpcgen, as if
from a command-line with CommandLine(sys.argv). If you're
calling this directly, remember to provide a dummy value
for sys.argv[0]
"""
self.filename = None
self.header_file = None
self.impl_file = None
self.factory = CCodeGenerator()
if len(argv) >= 2 and argv[1] == '--quiet':
global QUIETLY
QUIETLY = 1
del argv[1]
if len(argv) < 2 or len(argv) > 4:
raise Usage(argv[0])
self.filename = argv[1].replace('\\', '/')
if len(argv) == 3:
self.impl_file = argv[2].replace('\\', '/')
if len(argv) == 4:
self.header_file = argv[2].replace('\\', '/')
self.impl_file = argv[3].replace('\\', '/')
if not self.filename:
raise Usage(argv[0])
if not self.impl_file:
self.impl_file = self.factory.CodeFilename(self.filename)
if not self.header_file:
self.header_file = self.factory.HeaderFilename(self.impl_file)
if not self.impl_file.endswith('.c'):
raise RpcGenError("can only generate C implementation files")
if not self.header_file.endswith('.h'):
raise RpcGenError("can only generate C header files")
def run(self):
filename = self.filename
header_file = self.header_file
impl_file = self.impl_file
factory = self.factory
declare('Reading \"%s\"' % filename)
fp = open(filename, 'r')
entities = Parse(factory, fp)
fp.close()
declare('... creating "%s"' % header_file)
header_fp = open(header_file, 'w')
header_fp.write(factory.HeaderPreamble(filename))
# Create forward declarations: allows other structs to reference
# each other
for entry in entities:
entry.PrintForwardDeclaration(header_fp)
header_fp.write('\n')
for entry in entities:
entry.PrintTags(header_fp)
entry.PrintDeclaration(header_fp)
header_fp.write(factory.HeaderPostamble(filename))
header_fp.close()
declare('... creating "%s"' % impl_file)
impl_fp = open(impl_file, 'w')
impl_fp.write(factory.BodyPreamble(filename, header_file))
for entry in entities:
entry.PrintCode(impl_fp)
impl_fp.close()
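# Example (illustrative; the script name is a placeholder):
#   CommandLine(['event_rpcgen.py', 'message.rpc']).run()
# With no explicit outputs, CodeFilename/HeaderFilename above derive
# message.gen.c and message.gen.h from the input name.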
if __name__ == '__main__':
try:
CommandLine(sys.argv).run()
sys.exit(0)
except RpcGenError as e:
sys.stderr.write('%s\n' % e)
sys.exit(1)
except EnvironmentError as e:
if e.filename and e.strerror:
sys.stderr.write("%s: %s" % (e.filename, e.strerror))
sys.exit(1)
elif e.strerror:
sys.stderr.write('%s\n' % e.strerror)
sys.exit(1)
else:
raise
|
hurricup/intellij-community
|
refs/heads/master
|
python/helpers/coverage_runner/__init__.py
|
981
|
__author__ = 'traff'
|
fidomason/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/distutils/command/check.py
|
78
|
"""distutils.command.check
Implements the Distutils 'check' command.
"""
from distutils.core import Command
from distutils.errors import DistutilsSetupError
try:
# docutils is installed
from docutils.utils import Reporter
from docutils.parsers.rst import Parser
from docutils import frontend
from docutils import nodes
from io import StringIO
class SilentReporter(Reporter):
def __init__(self, source, report_level, halt_level, stream=None,
debug=0, encoding='ascii', error_handler='replace'):
self.messages = []
Reporter.__init__(self, source, report_level, halt_level, stream,
debug, encoding, error_handler)
def system_message(self, level, message, *children, **kwargs):
self.messages.append((level, message, children, kwargs))
return nodes.system_message(message, level=level,
type=self.levels[level],
*children, **kwargs)
HAS_DOCUTILS = True
except Exception:
# Catch all exceptions because exceptions besides ImportError probably
# indicate that docutils is not ported to Py3k.
HAS_DOCUTILS = False
class check(Command):
"""This command checks the meta-data of the package.
"""
description = ("perform some checks on the package")
user_options = [('metadata', 'm', 'Verify meta-data'),
('restructuredtext', 'r',
('Checks if long string meta-data syntax '
'is reStructuredText-compliant')),
('strict', 's',
'Will exit with an error if a check fails')]
boolean_options = ['metadata', 'restructuredtext', 'strict']
def initialize_options(self):
"""Sets default values for options."""
self.restructuredtext = 0
self.metadata = 1
self.strict = 0
self._warnings = 0
def finalize_options(self):
pass
def warn(self, msg):
"""Counts the number of warnings that occurs."""
self._warnings += 1
return Command.warn(self, msg)
def run(self):
"""Runs the command."""
# perform the various tests
if self.metadata:
self.check_metadata()
if self.restructuredtext:
if HAS_DOCUTILS:
self.check_restructuredtext()
elif self.strict:
raise DistutilsSetupError('The docutils package is needed.')
# let's raise an error in strict mode, if we have at least
# one warning
if self.strict and self._warnings > 0:
raise DistutilsSetupError('Please correct your package.')
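# Typical invocation (illustrative):
#   python setup.py check --restructuredtext --strict
# which runs check_metadata() by default plus the reST validation below,
# and fails the build if any warning was emitted.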
def check_metadata(self):
"""Ensures that all required elements of meta-data are supplied.
name, version, URL, (author and author_email) or
(maintainer and maintainer_email)).
Warns if any are missing.
"""
metadata = self.distribution.metadata
missing = []
for attr in ('name', 'version', 'url'):
if not (hasattr(metadata, attr) and getattr(metadata, attr)):
missing.append(attr)
if missing:
self.warn("missing required meta-data: %s" % ', '.join(missing))
if metadata.author:
if not metadata.author_email:
self.warn("missing meta-data: if 'author' supplied, " +
"'author_email' must be supplied too")
elif metadata.maintainer:
if not metadata.maintainer_email:
self.warn("missing meta-data: if 'maintainer' supplied, " +
"'maintainer_email' must be supplied too")
else:
self.warn("missing meta-data: either (author and author_email) " +
"or (maintainer and maintainer_email) " +
"must be supplied")
def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
data = self.distribution.get_long_description()
for warning in self._check_rst_data(data):
line = warning[-1].get('line')
if line is None:
warning = warning[1]
else:
warning = '%s (line %s)' % (warning[1], line)
self.warn(warning)
def _check_rst_data(self, data):
"""Returns warnings when the provided data doesn't compile."""
source_path = StringIO()
parser = Parser()
settings = frontend.OptionParser().get_default_values()
settings.tab_width = 4
settings.pep_references = None
settings.rfc_references = None
reporter = SilentReporter(source_path,
settings.report_level,
settings.halt_level,
stream=settings.warning_stream,
debug=settings.debug,
encoding=settings.error_encoding,
error_handler=settings.error_encoding_error_handler)
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, -1)
try:
parser.parse(data, document)
except AttributeError:
reporter.messages.append((-1, 'Could not finish the parsing.',
'', {}))
return reporter.messages
|
phonnz/azure-storage-python
|
refs/heads/master
|
tests/doctest_queueservice.py
|
2
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
"""
How To: Create a Queue
----------------------
>>> from azure.storage.queue import QueueService
>>> queue_service = QueueService(name, key)
>>> queue_service.create_queue('taskqueue')
True
How To: Insert a Message into a Queue
-------------------------------------
>>> queue_service.put_message('taskqueue', 'Hello World')
How To: Peek at the Next Message
--------------------------------
>>> messages = queue_service.peek_messages('taskqueue')
>>> for message in messages:
... print(message.message_text)
...
Hello World
How To: Dequeue the Next Message
--------------------------------
>>> messages = queue_service.get_messages('taskqueue')
>>> for message in messages:
... print(message.message_text)
... queue_service.delete_message('taskqueue', message.message_id, message.pop_receipt)
Hello World
How To: Change the Contents of a Queued Message
-----------------------------------------------
>>> queue_service.put_message('taskqueue', 'Hello World')
>>> messages = queue_service.get_messages('taskqueue')
>>> for message in messages:
... res = queue_service.update_message('taskqueue', message.message_id, 'Hello World Again', message.pop_receipt, 0)
How To: Additional Options for Dequeuing Messages
-------------------------------------------------
>>> queue_service.put_message('taskqueue', 'Hello World')
>>> messages = queue_service.get_messages('taskqueue', numofmessages=16, visibilitytimeout=5*60)
>>> for message in messages:
... print(message.message_text)
... queue_service.delete_message('taskqueue', message.message_id, message.pop_receipt)
Hello World Again
Hello World
How To: Get the Queue Length
----------------------------
>>> queue_metadata = queue_service.get_queue_metadata('taskqueue')
>>> count = queue_metadata['x-ms-approximate-messages-count']
>>> count
'0'
How To: Delete a Queue
----------------------
>>> queue_service.delete_queue('taskqueue')
True
"""
import tests.storage_settings_real as settings
name = settings.STORAGE_ACCOUNT_NAME
key = settings.STORAGE_ACCOUNT_KEY
if __name__ == "__main__":
import doctest
doctest.testmod()
|
Paul-Ezell/cinder-1
|
refs/heads/master
|
cinder/api/v2/volume_metadata.py
|
15
|
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from cinder.api import common
from cinder.api.openstack import wsgi
from cinder import exception
from cinder.i18n import _
from cinder import volume
class Controller(wsgi.Controller):
"""The volume metadata API controller for the OpenStack API."""
def __init__(self):
self.volume_api = volume.API()
super(Controller, self).__init__()
def _get_metadata(self, context, volume_id):
# The metadata is at the second position of the tuple returned
# from _get_volume_and_metadata
return self._get_volume_and_metadata(context, volume_id)[1]
def _get_volume_and_metadata(self, context, volume_id):
try:
volume = self.volume_api.get(context, volume_id)
meta = self.volume_api.get_volume_metadata(context, volume)
except exception.VolumeNotFound as error:
raise webob.exc.HTTPNotFound(explanation=error.msg)
return (volume, meta)
@wsgi.serializers(xml=common.MetadataTemplate)
def index(self, req, volume_id):
"""Returns the list of metadata for a given volume."""
context = req.environ['cinder.context']
return {'metadata': self._get_metadata(context, volume_id)}
@wsgi.serializers(xml=common.MetadataTemplate)
@wsgi.deserializers(xml=common.MetadataDeserializer)
def create(self, req, volume_id, body):
self.assert_valid_body(body, 'metadata')
context = req.environ['cinder.context']
metadata = body['metadata']
new_metadata = self._update_volume_metadata(context,
volume_id,
metadata,
delete=False)
return {'metadata': new_metadata}
@wsgi.serializers(xml=common.MetaItemTemplate)
@wsgi.deserializers(xml=common.MetaItemDeserializer)
def update(self, req, volume_id, id, body):
self.assert_valid_body(body, 'meta')
meta_item = body['meta']
if id not in meta_item:
expl = _('Request body and URI mismatch')
raise webob.exc.HTTPBadRequest(explanation=expl)
if len(meta_item) > 1:
expl = _('Request body contains too many items')
raise webob.exc.HTTPBadRequest(explanation=expl)
context = req.environ['cinder.context']
self._update_volume_metadata(context,
volume_id,
meta_item,
delete=False)
return {'meta': meta_item}
@wsgi.serializers(xml=common.MetadataTemplate)
@wsgi.deserializers(xml=common.MetadataDeserializer)
def update_all(self, req, volume_id, body):
self.assert_valid_body(body, 'metadata')
metadata = body['metadata']
context = req.environ['cinder.context']
new_metadata = self._update_volume_metadata(context,
volume_id,
metadata,
delete=True)
return {'metadata': new_metadata}
def _update_volume_metadata(self, context,
volume_id, metadata,
delete=False):
try:
volume = self.volume_api.get(context, volume_id)
return self.volume_api.update_volume_metadata(
context,
volume,
metadata,
delete,
meta_type=common.METADATA_TYPES.user)
except exception.VolumeNotFound as error:
raise webob.exc.HTTPNotFound(explanation=error.msg)
except (ValueError, AttributeError):
msg = _("Malformed request body")
raise webob.exc.HTTPBadRequest(explanation=msg)
except exception.InvalidVolumeMetadata as error:
raise webob.exc.HTTPBadRequest(explanation=error.msg)
except exception.InvalidVolumeMetadataSize as error:
raise webob.exc.HTTPRequestEntityTooLarge(explanation=error.msg)
@wsgi.serializers(xml=common.MetaItemTemplate)
def show(self, req, volume_id, id):
"""Return a single metadata item."""
context = req.environ['cinder.context']
data = self._get_metadata(context, volume_id)
try:
return {'meta': {id: data[id]}}
except KeyError:
msg = _("Metadata item was not found")
raise webob.exc.HTTPNotFound(explanation=msg)
def delete(self, req, volume_id, id):
"""Deletes an existing metadata."""
context = req.environ['cinder.context']
volume, metadata = self._get_volume_and_metadata(context, volume_id)
if id not in metadata:
msg = _("Metadata item was not found")
raise webob.exc.HTTPNotFound(explanation=msg)
try:
self.volume_api.delete_volume_metadata(
context,
volume,
id,
meta_type=common.METADATA_TYPES.user)
except exception.VolumeNotFound as error:
raise webob.exc.HTTPNotFound(explanation=error.msg)
return webob.Response(status_int=200)
def create_resource():
return wsgi.Resource(Controller())
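# Illustrative wiring (an assumption; the real mapping lives in the API
# router, not in this module):
#   mapper.resource("volume_metadata", "metadata",
#                   controller=create_resource(),
#                   parent_resource=dict(member_name='volume',
#                                        collection_name='volumes'))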
|
movmov/cc
|
refs/heads/master
|
vendor/Twisted-10.0.0/twisted/protocols/sip.py
|
59
|
# -*- test-case-name: twisted.test.test_sip -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Session Initialization Protocol.
Documented in RFC 2543.
[Superseded by RFC 3261]
This module contains a deprecated implementation of HTTP Digest authentication.
See L{twisted.cred.credentials} and L{twisted.cred._digest} for its new home.
"""
# system imports
import socket, time, sys, random, warnings
from zope.interface import implements, Interface
# twisted imports
from twisted.python import log, util
from twisted.python.deprecate import deprecated
from twisted.python.versions import Version
from twisted.python.hashlib import md5
from twisted.internet import protocol, defer, reactor
from twisted import cred
import twisted.cred.error
from twisted.cred.credentials import UsernameHashedPassword, UsernamePassword
# sibling imports
from twisted.protocols import basic
PORT = 5060
# SIP headers have short forms
shortHeaders = {"call-id": "i",
"contact": "m",
"content-encoding": "e",
"content-length": "l",
"content-type": "c",
"from": "f",
"subject": "s",
"to": "t",
"via": "v",
}
longHeaders = {}
for k, v in shortHeaders.items():
longHeaders[v] = k
del k, v
statusCodes = {
100: "Trying",
180: "Ringing",
181: "Call Is Being Forwarded",
182: "Queued",
183: "Session Progress",
200: "OK",
300: "Multiple Choices",
301: "Moved Permanently",
302: "Moved Temporarily",
303: "See Other",
305: "Use Proxy",
380: "Alternative Service",
400: "Bad Request",
401: "Unauthorized",
402: "Payment Required",
403: "Forbidden",
404: "Not Found",
405: "Method Not Allowed",
406: "Not Acceptable",
407: "Proxy Authentication Required",
408: "Request Timeout",
409: "Conflict", # Not in RFC3261
410: "Gone",
411: "Length Required", # Not in RFC3261
413: "Request Entity Too Large",
414: "Request-URI Too Large",
415: "Unsupported Media Type",
416: "Unsupported URI Scheme",
420: "Bad Extension",
421: "Extension Required",
423: "Interval Too Brief",
480: "Temporarily Unavailable",
481: "Call/Transaction Does Not Exist",
482: "Loop Detected",
483: "Too Many Hops",
484: "Address Incomplete",
485: "Ambiguous",
486: "Busy Here",
487: "Request Terminated",
488: "Not Acceptable Here",
491: "Request Pending",
493: "Undecipherable",
500: "Internal Server Error",
501: "Not Implemented",
502: "Bad Gateway", # no donut
503: "Service Unavailable",
504: "Server Time-out",
505: "SIP Version not supported",
513: "Message Too Large",
600: "Busy Everywhere",
603: "Decline",
604: "Does not exist anywhere",
606: "Not Acceptable",
}
specialCases = {
'cseq': 'CSeq',
'call-id': 'Call-ID',
'www-authenticate': 'WWW-Authenticate',
}
def dashCapitalize(s):
''' Capitalize a string, making sure to treat - as a word separator '''
return '-'.join([ x.capitalize() for x in s.split('-')])
def unq(s):
if s[0] == s[-1] == '"':
return s[1:-1]
return s
def DigestCalcHA1(
pszAlg,
pszUserName,
pszRealm,
pszPassword,
pszNonce,
pszCNonce,
):
m = md5()
m.update(pszUserName)
m.update(":")
m.update(pszRealm)
m.update(":")
m.update(pszPassword)
HA1 = m.digest()
if pszAlg == "md5-sess":
m = md5()
m.update(HA1)
m.update(":")
m.update(pszNonce)
m.update(":")
m.update(pszCNonce)
HA1 = m.digest()
return HA1.encode('hex')
DigestCalcHA1 = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcHA1)
def DigestCalcResponse(
HA1,
pszNonce,
pszNonceCount,
pszCNonce,
pszQop,
pszMethod,
pszDigestUri,
pszHEntity,
):
m = md5()
m.update(pszMethod)
m.update(":")
m.update(pszDigestUri)
if pszQop == "auth-int":
m.update(":")
m.update(pszHEntity)
HA2 = m.digest().encode('hex')
m = md5()
m.update(HA1)
m.update(":")
m.update(pszNonce)
m.update(":")
if pszNonceCount and pszCNonce: # pszQop:
m.update(pszNonceCount)
m.update(":")
m.update(pszCNonce)
m.update(":")
m.update(pszQop)
m.update(":")
m.update(HA2)
hash = m.digest().encode('hex')
return hash
DigestCalcResponse = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcResponse)
_absent = object()
class Via(object):
"""
A L{Via} is a SIP Via header, representing a segment of the path taken by
the request.
See RFC 3261, sections 8.1.1.7, 18.2.2, and 20.42.
@ivar transport: Network protocol used for this leg. (Probably either "TCP"
or "UDP".)
@type transport: C{str}
@ivar branch: Unique identifier for this request.
@type branch: C{str}
@ivar host: Hostname or IP for this leg.
@type host: C{str}
@ivar port: Port used for this leg.
@type port: C{int}, or None.
@ivar rportRequested: Whether to request RFC 3581 client processing or not.
@type rportRequested: C{bool}
@ivar rportValue: Servers wishing to honor requests for RFC 3581 processing
should set this parameter to the source port the request was received
from.
@type rportValue: C{int}, or None.
@ivar ttl: Time-to-live for requests on multicast paths.
@type ttl: C{int}, or None.
@ivar maddr: The destination multicast address, if any.
@type maddr: C{str}, or None.
@ivar hidden: Obsolete in SIP 2.0.
@type hidden: C{bool}
@ivar otherParams: Any other parameters in the header.
@type otherParams: C{dict}
"""
def __init__(self, host, port=PORT, transport="UDP", ttl=None,
hidden=False, received=None, rport=_absent, branch=None,
maddr=None, **kw):
"""
Set parameters of this Via header. All arguments correspond to
attributes of the same name.
To maintain compatibility with old SIP
code, the 'rport' argument is used to determine the values of
C{rportRequested} and C{rportValue}. If None, C{rportRequested} is set
to True. (The deprecated method for doing this is to pass True.) If an
integer, C{rportValue} is set to the given value.
Any arguments not explicitly named here are collected into the
C{otherParams} dict.
"""
self.transport = transport
self.host = host
self.port = port
self.ttl = ttl
self.hidden = hidden
self.received = received
if rport is True:
warnings.warn(
"rport=True is deprecated since Twisted 9.0.",
DeprecationWarning,
stacklevel=2)
self.rportValue = None
self.rportRequested = True
elif rport is None:
self.rportValue = None
self.rportRequested = True
elif rport is _absent:
self.rportValue = None
self.rportRequested = False
else:
self.rportValue = rport
self.rportRequested = False
self.branch = branch
self.maddr = maddr
self.otherParams = kw
def _getrport(self):
"""
Returns the rport value expected by the old SIP code.
"""
if self.rportRequested == True:
return True
elif self.rportValue is not None:
return self.rportValue
else:
return None
def _setrport(self, newRPort):
"""
L{Base._fixupNAT} sets C{rport} directly, so this method sets
C{rportValue} based on that.
@param newRPort: The new rport value.
@type newRPort: C{int}
"""
self.rportValue = newRPort
self.rportRequested = False
rport = property(_getrport, _setrport)
def toString(self):
"""
Serialize this header for use in a request or response.
"""
s = "SIP/2.0/%s %s:%s" % (self.transport, self.host, self.port)
if self.hidden:
s += ";hidden"
for n in "ttl", "branch", "maddr", "received":
value = getattr(self, n)
if value is not None:
s += ";%s=%s" % (n, value)
if self.rportRequested:
s += ";rport"
elif self.rportValue is not None:
s += ";rport=%s" % (self.rport,)
etc = self.otherParams.items()
etc.sort()
for k, v in etc:
if v is None:
s += ";" + k
else:
s += ";%s=%s" % (k, v)
return s
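# Illustrative round trip (values are made up):
#   v = Via("10.0.0.1", transport="UDP", branch="z9hG4bKabc")
#   v.toString()  # -> 'SIP/2.0/UDP 10.0.0.1:5060;branch=z9hG4bKabc'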
def parseViaHeader(value):
"""
Parse a Via header.
@return: The parsed version of this header.
@rtype: L{Via}
"""
parts = value.split(";")
sent, params = parts[0], parts[1:]
protocolinfo, by = sent.split(" ", 1)
by = by.strip()
result = {}
pname, pversion, transport = protocolinfo.split("/")
if pname != "SIP" or pversion != "2.0":
raise ValueError, "wrong protocol or version: %r" % value
result["transport"] = transport
if ":" in by:
host, port = by.split(":")
result["port"] = int(port)
result["host"] = host
else:
result["host"] = by
for p in params:
# it's the comment-stripping dance!
p = p.strip().split(" ", 1)
if len(p) == 1:
p, comment = p[0], ""
else:
p, comment = p
if p == "hidden":
result["hidden"] = True
continue
parts = p.split("=", 1)
if len(parts) == 1:
name, value = parts[0], None
else:
name, value = parts
if name in ("rport", "ttl"):
value = int(value)
result[name] = value
return Via(**result)
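# e.g. (illustrative):
#   v = parseViaHeader("SIP/2.0/UDP pc33.example.com;branch=z9hG4bK776")
#   (v.transport, v.host, v.port, v.branch)
#   # -> ('UDP', 'pc33.example.com', 5060, 'z9hG4bK776')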
class URL:
"""A SIP URL."""
def __init__(self, host, username=None, password=None, port=None,
transport=None, usertype=None, method=None,
ttl=None, maddr=None, tag=None, other=None, headers=None):
self.username = username
self.host = host
self.password = password
self.port = port
self.transport = transport
self.usertype = usertype
self.method = method
self.tag = tag
self.ttl = ttl
self.maddr = maddr
if other == None:
self.other = []
else:
self.other = other
if headers == None:
self.headers = {}
else:
self.headers = headers
def toString(self):
l = []; w = l.append
w("sip:")
if self.username != None:
w(self.username)
if self.password != None:
w(":%s" % self.password)
w("@")
w(self.host)
if self.port != None:
w(":%d" % self.port)
if self.usertype != None:
w(";user=%s" % self.usertype)
for n in ("transport", "ttl", "maddr", "method", "tag"):
v = getattr(self, n)
if v != None:
w(";%s=%s" % (n, v))
for v in self.other:
w(";%s" % v)
if self.headers:
w("?")
w("&".join([("%s=%s" % (specialCases.get(h) or dashCapitalize(h), v)) for (h, v) in self.headers.items()]))
return "".join(l)
def __str__(self):
return self.toString()
def __repr__(self):
return '<URL %s:%s@%s:%r/%s>' % (self.username, self.password, self.host, self.port, self.transport)
def parseURL(url, host=None, port=None):
"""Return string into URL object.
URIs are of of form 'sip:user@example.com'.
"""
d = {}
if not url.startswith("sip:"):
raise ValueError("unsupported scheme: " + url[:4])
parts = url[4:].split(";")
userdomain, params = parts[0], parts[1:]
udparts = userdomain.split("@", 1)
if len(udparts) == 2:
userpass, hostport = udparts
upparts = userpass.split(":", 1)
if len(upparts) == 1:
d["username"] = upparts[0]
else:
d["username"] = upparts[0]
d["password"] = upparts[1]
else:
hostport = udparts[0]
hpparts = hostport.split(":", 1)
if len(hpparts) == 1:
d["host"] = hpparts[0]
else:
d["host"] = hpparts[0]
d["port"] = int(hpparts[1])
if host != None:
d["host"] = host
if port != None:
d["port"] = port
for p in params:
if p == params[-1] and "?" in p:
d["headers"] = h = {}
p, headers = p.split("?", 1)
for header in headers.split("&"):
k, v = header.split("=")
h[k] = v
nv = p.split("=", 1)
if len(nv) == 1:
d.setdefault("other", []).append(p)
continue
name, value = nv
if name == "user":
d["usertype"] = value
elif name in ("transport", "ttl", "maddr", "method", "tag"):
if name == "ttl":
value = int(value)
d[name] = value
else:
d.setdefault("other", []).append(p)
return URL(**d)
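# e.g. (illustrative):
#   u = parseURL("sip:bob:secret@example.com:5061;transport=udp")
#   (u.username, u.host, u.port, u.transport)
#   # -> ('bob', 'example.com', 5061, 'udp')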
def cleanRequestURL(url):
"""Clean a URL from a Request line."""
url.transport = None
url.maddr = None
url.ttl = None
url.headers = {}
def parseAddress(address, host=None, port=None, clean=0):
"""Return (name, uri, params) for From/To/Contact header.
@param clean: remove unnecessary info, usually for From and To headers.
"""
address = address.strip()
# simple 'sip:foo' case
if address.startswith("sip:"):
return "", parseURL(address, host=host, port=port), {}
params = {}
name, url = address.split("<", 1)
name = name.strip()
if name.startswith('"'):
name = name[1:]
if name.endswith('"'):
name = name[:-1]
url, paramstring = url.split(">", 1)
url = parseURL(url, host=host, port=port)
paramstring = paramstring.strip()
if paramstring:
for l in paramstring.split(";"):
if not l:
continue
k, v = l.split("=")
params[k] = v
if clean:
# rfc 2543 6.21
url.ttl = None
url.headers = {}
url.transport = None
url.maddr = None
return name, url, params
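# e.g. (illustrative):
#   name, url, params = parseAddress('"Alice" <sip:alice@example.com>;tag=1234')
#   name, params  # -> ('Alice', {'tag': '1234'})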
class SIPError(Exception):
def __init__(self, code, phrase=None):
if phrase is None:
phrase = statusCodes[code]
Exception.__init__(self, "SIP error (%d): %s" % (code, phrase))
self.code = code
self.phrase = phrase
class RegistrationError(SIPError):
"""Registration was not possible."""
class Message:
"""A SIP message."""
length = None
def __init__(self):
self.headers = util.OrderedDict() # map name to list of values
self.body = ""
self.finished = 0
def addHeader(self, name, value):
name = name.lower()
name = longHeaders.get(name, name)
if name == "content-length":
self.length = int(value)
self.headers.setdefault(name,[]).append(value)
def bodyDataReceived(self, data):
self.body += data
def creationFinished(self):
if (self.length != None) and (self.length != len(self.body)):
raise ValueError, "wrong body length"
self.finished = 1
def toString(self):
s = "%s\r\n" % self._getHeaderLine()
for n, vs in self.headers.items():
for v in vs:
s += "%s: %s\r\n" % (specialCases.get(n) or dashCapitalize(n), v)
s += "\r\n"
s += self.body
return s
def _getHeaderLine(self):
raise NotImplementedError
class Request(Message):
"""A Request for a URI"""
def __init__(self, method, uri, version="SIP/2.0"):
Message.__init__(self)
self.method = method
if isinstance(uri, URL):
self.uri = uri
else:
self.uri = parseURL(uri)
cleanRequestURL(self.uri)
def __repr__(self):
return "<SIP Request %d:%s %s>" % (id(self), self.method, self.uri.toString())
def _getHeaderLine(self):
return "%s %s SIP/2.0" % (self.method, self.uri.toString())
class Response(Message):
"""A Response to a URI Request"""
def __init__(self, code, phrase=None, version="SIP/2.0"):
Message.__init__(self)
self.code = code
if phrase == None:
phrase = statusCodes[code]
self.phrase = phrase
def __repr__(self):
return "<SIP Response %d:%s>" % (id(self), self.code)
def _getHeaderLine(self):
return "SIP/2.0 %s %s" % (self.code, self.phrase)
class MessagesParser(basic.LineReceiver):
"""A SIP messages parser.
Expects dataReceived, dataDone repeatedly,
in that order. Shouldn't be connected to actual transport.
"""
version = "SIP/2.0"
acceptResponses = 1
acceptRequests = 1
state = "firstline" # or "headers", "body" or "invalid"
debug = 0
def __init__(self, messageReceivedCallback):
self.messageReceived = messageReceivedCallback
self.reset()
def reset(self, remainingData=""):
self.state = "firstline"
self.length = None # body length
self.bodyReceived = 0 # how much of the body we received
self.message = None
self.setLineMode(remainingData)
def invalidMessage(self):
self.state = "invalid"
self.setRawMode()
def dataDone(self):
# clear out any buffered data that may be hanging around
self.clearLineBuffer()
if self.state == "firstline":
return
if self.state != "body":
self.reset()
return
if self.length == None:
# no content-length header, so end of data signals message done
self.messageDone()
elif self.length < self.bodyReceived:
# aborted in the middle
self.reset()
else:
# we have enough data and message wasn't finished? something is wrong
raise RuntimeError, "this should never happen"
def dataReceived(self, data):
try:
basic.LineReceiver.dataReceived(self, data)
except:
log.err()
self.invalidMessage()
def handleFirstLine(self, line):
"""Expected to create self.message."""
raise NotImplementedError
def lineLengthExceeded(self, line):
self.invalidMessage()
def lineReceived(self, line):
if self.state == "firstline":
while line.startswith("\n") or line.startswith("\r"):
line = line[1:]
if not line:
return
try:
a, b, c = line.split(" ", 2)
except ValueError:
self.invalidMessage()
return
if a == "SIP/2.0" and self.acceptResponses:
# response
try:
code = int(b)
except ValueError:
self.invalidMessage()
return
self.message = Response(code, c)
elif c == "SIP/2.0" and self.acceptRequests:
self.message = Request(a, b)
else:
self.invalidMessage()
return
self.state = "headers"
return
else:
assert self.state == "headers"
if line:
# XXX support multi-line headers
try:
name, value = line.split(":", 1)
except ValueError:
self.invalidMessage()
return
self.message.addHeader(name, value.lstrip())
if name.lower() == "content-length":
try:
self.length = int(value.lstrip())
except ValueError:
self.invalidMessage()
return
else:
# CRLF, we now have message body until self.length bytes,
# or if no length was given, until there is no more data
# from the connection sending us data.
self.state = "body"
if self.length == 0:
self.messageDone()
return
self.setRawMode()
def messageDone(self, remainingData=""):
assert self.state == "body"
self.message.creationFinished()
self.messageReceived(self.message)
self.reset(remainingData)
def rawDataReceived(self, data):
assert self.state in ("body", "invalid")
if self.state == "invalid":
return
if self.length == None:
self.message.bodyDataReceived(data)
else:
dataLen = len(data)
expectedLen = self.length - self.bodyReceived
if dataLen > expectedLen:
self.message.bodyDataReceived(data[:expectedLen])
self.messageDone(data[expectedLen:])
return
else:
self.bodyReceived += dataLen
self.message.bodyDataReceived(data)
if self.bodyReceived == self.length:
self.messageDone()
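# Illustrative use (assumed): parse one datagram's worth of SIP text.
#   received = []
#   mp = MessagesParser(received.append)
#   mp.dataReceived("OPTIONS sip:bob@example.com SIP/2.0\r\n"
#                   "Content-Length: 0\r\n\r\n")
#   mp.dataDone()
#   received[0].method  # -> 'OPTIONS'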
class Base(protocol.DatagramProtocol):
"""Base class for SIP clients and servers."""
PORT = PORT
debug = False
def __init__(self):
self.messages = []
self.parser = MessagesParser(self.addMessage)
def addMessage(self, msg):
self.messages.append(msg)
def datagramReceived(self, data, addr):
self.parser.dataReceived(data)
self.parser.dataDone()
for m in self.messages:
self._fixupNAT(m, addr)
if self.debug:
log.msg("Received %r from %r" % (m.toString(), addr))
if isinstance(m, Request):
self.handle_request(m, addr)
else:
self.handle_response(m, addr)
self.messages[:] = []
def _fixupNAT(self, message, (srcHost, srcPort)):
# RFC 2543 6.40.2,
senderVia = parseViaHeader(message.headers["via"][0])
if senderVia.host != srcHost:
senderVia.received = srcHost
if senderVia.port != srcPort:
senderVia.rport = srcPort
message.headers["via"][0] = senderVia.toString()
elif senderVia.rport == True:
senderVia.received = srcHost
senderVia.rport = srcPort
message.headers["via"][0] = senderVia.toString()
def deliverResponse(self, responseMessage):
"""Deliver response.
Destination is based on topmost Via header."""
destVia = parseViaHeader(responseMessage.headers["via"][0])
# XXX we don't do multicast yet
host = destVia.received or destVia.host
port = destVia.rport or destVia.port or self.PORT
destAddr = URL(host=host, port=port)
self.sendMessage(destAddr, responseMessage)
def responseFromRequest(self, code, request):
"""Create a response to a request message."""
response = Response(code)
for name in ("via", "to", "from", "call-id", "cseq"):
response.headers[name] = request.headers.get(name, [])[:]
return response
def sendMessage(self, destURL, message):
"""Send a message.
@param destURL: C{URL}. This should be a *physical* URL, not a logical one.
@param message: The message to send.
"""
if destURL.transport not in ("udp", None):
raise RuntimeError, "only UDP currently supported"
if self.debug:
log.msg("Sending %r to %r" % (message.toString(), destURL))
self.transport.write(message.toString(), (destURL.host, destURL.port or self.PORT))
def handle_request(self, message, addr):
"""Override to define behavior for requests received
@type message: C{Message}
@type addr: C{tuple}
"""
raise NotImplementedError
def handle_response(self, message, addr):
"""Override to define behavior for responses received.
@type message: C{Message}
@type addr: C{tuple}
"""
raise NotImplementedError
class IContact(Interface):
"""A user of a registrar or proxy"""
class Registration:
def __init__(self, secondsToExpiry, contactURL):
self.secondsToExpiry = secondsToExpiry
self.contactURL = contactURL
class IRegistry(Interface):
"""Allows registration of logical->physical URL mapping."""
def registerAddress(domainURL, logicalURL, physicalURL):
"""Register the physical address of a logical URL.
@return: Deferred of C{Registration} or failure with RegistrationError.
"""
def unregisterAddress(domainURL, logicalURL, physicalURL):
"""Unregister the physical address of a logical URL.
@return: Deferred of C{Registration} or failure with RegistrationError.
"""
def getRegistrationInfo(logicalURL):
"""Get registration info for logical URL.
@return: Deferred of C{Registration} object or failure of LookupError.
"""
class ILocator(Interface):
"""Allow looking up physical address for logical URL."""
def getAddress(logicalURL):
"""Return physical URL of server for logical URL of user.
@param logicalURL: a logical C{URL}.
@return: Deferred which becomes URL or fails with LookupError.
"""
class Proxy(Base):
"""SIP proxy."""
PORT = PORT
locator = None # object implementing ILocator
def __init__(self, host=None, port=PORT):
"""Create new instance.
@param host: our hostname/IP as set in Via headers.
@param port: our port as set in Via headers.
"""
self.host = host or socket.getfqdn()
self.port = port
Base.__init__(self)
def getVia(self):
"""Return value of Via header for this proxy."""
return Via(host=self.host, port=self.port)
def handle_request(self, message, addr):
# send immediate 100/trying message before processing
#self.deliverResponse(self.responseFromRequest(100, message))
f = getattr(self, "handle_%s_request" % message.method, None)
if f is None:
f = self.handle_request_default
try:
d = f(message, addr)
except SIPError, e:
self.deliverResponse(self.responseFromRequest(e.code, message))
except:
log.err()
self.deliverResponse(self.responseFromRequest(500, message))
else:
if d is not None:
d.addErrback(lambda e:
self.deliverResponse(self.responseFromRequest(e.code, message))
)
def handle_request_default(self, message, (srcHost, srcPort)):
"""Default request handler.
Default behaviour for OPTIONS and unknown methods for proxies
is to forward the message on to the client.
Since at the moment we are a stateless proxy, that's basically
everything.
"""
def _mungContactHeader(uri, message):
message.headers['contact'][0] = uri.toString()
return self.sendMessage(uri, message)
viaHeader = self.getVia()
if viaHeader.toString() in message.headers["via"]:
# must be a loop, so drop message
log.msg("Dropping looped message.")
return
message.headers["via"].insert(0, viaHeader.toString())
name, uri, tags = parseAddress(message.headers["to"][0], clean=1)
# this is broken and needs refactoring to use cred
d = self.locator.getAddress(uri)
d.addCallback(self.sendMessage, message)
d.addErrback(self._cantForwardRequest, message)
def _cantForwardRequest(self, error, message):
error.trap(LookupError)
del message.headers["via"][0] # this'll be us
self.deliverResponse(self.responseFromRequest(404, message))
def deliverResponse(self, responseMessage):
"""Deliver response.
Destination is based on topmost Via header."""
destVia = parseViaHeader(responseMessage.headers["via"][0])
# XXX we don't do multicast yet
host = destVia.received or destVia.host
port = destVia.rport or destVia.port or self.PORT
destAddr = URL(host=host, port=port)
self.sendMessage(destAddr, responseMessage)
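    # Illustrative: for a response whose topmost Via is
    # "SIP/2.0/UDP client.example.com;received=10.0.0.5;rport=5062",
    # deliverResponse sends to 10.0.0.5:5062 (the address the request actually
    # arrived from) rather than the advertised host and port.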
def responseFromRequest(self, code, request):
"""Create a response to a request message."""
response = Response(code)
for name in ("via", "to", "from", "call-id", "cseq"):
response.headers[name] = request.headers.get(name, [])[:]
return response
def handle_response(self, message, addr):
"""Default response handler."""
v = parseViaHeader(message.headers["via"][0])
if (v.host, v.port) != (self.host, self.port):
# we got a message not intended for us?
# XXX note this check breaks if we have multiple external IPs
            # yay for sucky protocols
log.msg("Dropping incorrectly addressed message")
return
del message.headers["via"][0]
if not message.headers["via"]:
# this message is addressed to us
self.gotResponse(message, addr)
return
self.deliverResponse(message)
def gotResponse(self, message, addr):
"""Called with responses that are addressed at this server."""
pass
class IAuthorizer(Interface):
def getChallenge(peer):
"""Generate a challenge the client may respond to.
@type peer: C{tuple}
@param peer: The client's address
@rtype: C{str}
@return: The challenge string
"""
def decode(response):
"""Create a credentials object from the given response.
@type response: C{str}
"""
class BasicAuthorizer:
"""Authorizer for insecure Basic (base64-encoded plaintext) authentication.
This form of authentication is broken and insecure. Do not use it.
"""
implements(IAuthorizer)
def __init__(self):
"""
This method exists solely to issue a deprecation warning.
"""
warnings.warn(
"twisted.protocols.sip.BasicAuthorizer was deprecated "
"in Twisted 9.0.0",
category=DeprecationWarning,
stacklevel=2)
def getChallenge(self, peer):
return None
def decode(self, response):
# At least one SIP client improperly pads its Base64 encoded messages
for i in range(3):
try:
creds = (response + ('=' * i)).decode('base64')
except:
pass
else:
break
else:
# Totally bogus
raise SIPError(400)
p = creds.split(':', 1)
if len(p) == 2:
return UsernamePassword(*p)
raise SIPError(400)
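    # Illustrative: "dXNlcjpwYXNz" is base64 for "user:pass", so
    # decode("dXNlcjpwYXNz") returns UsernamePassword("user", "pass").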
class DigestedCredentials(UsernameHashedPassword):
"""Yet Another Simple Digest-MD5 authentication scheme"""
def __init__(self, username, fields, challenges):
warnings.warn(
"twisted.protocols.sip.DigestedCredentials was deprecated "
"in Twisted 9.0.0",
category=DeprecationWarning,
stacklevel=2)
self.username = username
self.fields = fields
self.challenges = challenges
def checkPassword(self, password):
method = 'REGISTER'
response = self.fields.get('response')
uri = self.fields.get('uri')
nonce = self.fields.get('nonce')
cnonce = self.fields.get('cnonce')
nc = self.fields.get('nc')
algo = self.fields.get('algorithm', 'MD5')
qop = self.fields.get('qop-options', 'auth')
opaque = self.fields.get('opaque')
if opaque not in self.challenges:
return False
del self.challenges[opaque]
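        # Challenges are single-use: consuming the opaque here means a
        # replayed response with the same opaque fails the lookup above.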
user, domain = self.username.split('@', 1)
if uri is None:
uri = 'sip:' + domain
expected = DigestCalcResponse(
DigestCalcHA1(algo, user, domain, password, nonce, cnonce),
nonce, nc, cnonce, qop, method, uri, None,
)
return expected == response
class DigestAuthorizer:
CHALLENGE_LIFETIME = 15
implements(IAuthorizer)
def __init__(self):
warnings.warn(
"twisted.protocols.sip.DigestAuthorizer was deprecated "
"in Twisted 9.0.0",
category=DeprecationWarning,
stacklevel=2)
self.outstanding = {}
def generateNonce(self):
c = tuple([random.randrange(sys.maxint) for _ in range(3)])
c = '%d%d%d' % c
return c
def generateOpaque(self):
return str(random.randrange(sys.maxint))
def getChallenge(self, peer):
c = self.generateNonce()
o = self.generateOpaque()
self.outstanding[o] = c
return ','.join((
'nonce="%s"' % c,
'opaque="%s"' % o,
'qop-options="auth"',
'algorithm="MD5"',
))
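    # Illustrative: a generated challenge looks like (values made up):
    #   nonce="123456789",opaque="987654321",qop-options="auth",algorithm="MD5"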
def decode(self, response):
response = ' '.join(response.splitlines())
parts = response.split(',')
auth = dict([(k.strip(), unq(v.strip())) for (k, v) in [p.split('=', 1) for p in parts]])
try:
username = auth['username']
except KeyError:
raise SIPError(401)
try:
return DigestedCredentials(username, auth, self.outstanding)
except:
raise SIPError(400)
class RegisterProxy(Proxy):
"""A proxy that allows registration for a specific domain.
Unregistered users won't be handled.
"""
portal = None
registry = None # should implement IRegistry
authorizers = {}
def __init__(self, *args, **kw):
Proxy.__init__(self, *args, **kw)
self.liveChallenges = {}
if "digest" not in self.authorizers:
self.authorizers["digest"] = DigestAuthorizer()
def handle_ACK_request(self, message, (host, port)):
# XXX
# ACKs are a client's way of indicating they got the last message
# Responding to them is not a good idea.
# However, we should keep track of terminal messages and re-transmit
# if no ACK is received.
pass
def handle_REGISTER_request(self, message, (host, port)):
"""Handle a registration request.
Currently registration is not proxied.
"""
if self.portal is None:
# There is no portal. Let anyone in.
self.register(message, host, port)
else:
# There is a portal. Check for credentials.
if not message.headers.has_key("authorization"):
return self.unauthorized(message, host, port)
else:
return self.login(message, host, port)
def unauthorized(self, message, host, port):
m = self.responseFromRequest(401, message)
for (scheme, auth) in self.authorizers.iteritems():
chal = auth.getChallenge((host, port))
if chal is None:
value = '%s realm="%s"' % (scheme.title(), self.host)
else:
value = '%s %s,realm="%s"' % (scheme.title(), chal, self.host)
m.headers.setdefault('www-authenticate', []).append(value)
self.deliverResponse(m)
def login(self, message, host, port):
parts = message.headers['authorization'][0].split(None, 1)
a = self.authorizers.get(parts[0].lower())
if a:
try:
c = a.decode(parts[1])
except SIPError:
raise
except:
log.err()
self.deliverResponse(self.responseFromRequest(500, message))
else:
c.username += '@' + self.host
self.portal.login(c, None, IContact
).addCallback(self._cbLogin, message, host, port
).addErrback(self._ebLogin, message, host, port
).addErrback(log.err
)
else:
self.deliverResponse(self.responseFromRequest(501, message))
def _cbLogin(self, (i, a, l), message, host, port):
# It's stateless, matey. What a joke.
self.register(message, host, port)
def _ebLogin(self, failure, message, host, port):
failure.trap(cred.error.UnauthorizedLogin)
self.unauthorized(message, host, port)
def register(self, message, host, port):
"""Allow all users to register"""
name, toURL, params = parseAddress(message.headers["to"][0], clean=1)
contact = None
if message.headers.has_key("contact"):
contact = message.headers["contact"][0]
if message.headers.get("expires", [None])[0] == "0":
self.unregister(message, toURL, contact)
else:
# XXX Check expires on appropriate URL, and pass it to registry
# instead of having registry hardcode it.
if contact is not None:
name, contactURL, params = parseAddress(contact, host=host, port=port)
d = self.registry.registerAddress(message.uri, toURL, contactURL)
else:
d = self.registry.getRegistrationInfo(toURL)
d.addCallbacks(self._cbRegister, self._ebRegister,
callbackArgs=(message,),
errbackArgs=(message,)
)
def _cbRegister(self, registration, message):
response = self.responseFromRequest(200, message)
if registration.contactURL != None:
response.addHeader("contact", registration.contactURL.toString())
response.addHeader("expires", "%d" % registration.secondsToExpiry)
response.addHeader("content-length", "0")
self.deliverResponse(response)
def _ebRegister(self, error, message):
error.trap(RegistrationError, LookupError)
        # XXX return an error message, and alter tests to deal with this;
        # currently the tests assume no message is sent on failure
def unregister(self, message, toURL, contact):
try:
expires = int(message.headers["expires"][0])
except ValueError:
self.deliverResponse(self.responseFromRequest(400, message))
else:
if expires == 0:
if contact == "*":
contactURL = "*"
else:
name, contactURL, params = parseAddress(contact)
d = self.registry.unregisterAddress(message.uri, toURL, contactURL)
d.addCallback(self._cbUnregister, message
).addErrback(self._ebUnregister, message
)
def _cbUnregister(self, registration, message):
msg = self.responseFromRequest(200, message)
msg.headers.setdefault('contact', []).append(registration.contactURL.toString())
msg.addHeader("expires", "0")
self.deliverResponse(msg)
def _ebUnregister(self, registration, message):
pass
class InMemoryRegistry:
"""A simplistic registry for a specific domain."""
implements(IRegistry, ILocator)
def __init__(self, domain):
self.domain = domain # the domain we handle registration for
self.users = {} # map username to (IDelayedCall for expiry, address URI)
def getAddress(self, userURI):
if userURI.host != self.domain:
return defer.fail(LookupError("unknown domain"))
if self.users.has_key(userURI.username):
dc, url = self.users[userURI.username]
return defer.succeed(url)
else:
return defer.fail(LookupError("no such user"))
def getRegistrationInfo(self, userURI):
if userURI.host != self.domain:
return defer.fail(LookupError("unknown domain"))
if self.users.has_key(userURI.username):
dc, url = self.users[userURI.username]
return defer.succeed(Registration(int(dc.getTime() - time.time()), url))
else:
return defer.fail(LookupError("no such user"))
def _expireRegistration(self, username):
try:
dc, url = self.users[username]
except KeyError:
return defer.fail(LookupError("no such user"))
else:
dc.cancel()
del self.users[username]
return defer.succeed(Registration(0, url))
def registerAddress(self, domainURL, logicalURL, physicalURL):
if domainURL.host != self.domain:
log.msg("Registration for domain we don't handle.")
return defer.fail(RegistrationError(404))
if logicalURL.host != self.domain:
log.msg("Registration for domain we don't handle.")
return defer.fail(RegistrationError(404))
if self.users.has_key(logicalURL.username):
dc, old = self.users[logicalURL.username]
dc.reset(3600)
else:
dc = reactor.callLater(3600, self._expireRegistration, logicalURL.username)
log.msg("Registered %s at %s" % (logicalURL.toString(), physicalURL.toString()))
self.users[logicalURL.username] = (dc, physicalURL)
return defer.succeed(Registration(int(dc.getTime() - time.time()), physicalURL))
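    # Note: registrations are hardcoded to expire after 3600 seconds; a repeat
    # REGISTER for the same user resets that timer instead of adding an entry.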
def unregisterAddress(self, domainURL, logicalURL, physicalURL):
return self._expireRegistration(logicalURL.username)
|
ds-hwang/chromium-crosswalk
|
refs/heads/master
|
third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator_unittest.py
|
48
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import unittest
from webkitpy.layout_tests.layout_package import json_results_generator
class JSONGeneratorTest(unittest.TestCase):
def setUp(self):
self.builder_name = 'DUMMY_BUILDER_NAME'
self.build_name = 'DUMMY_BUILD_NAME'
self.build_number = 'DUMMY_BUILDER_NUMBER'
# For archived results.
self._json = None
self._num_runs = 0
self._tests_set = set([])
self._test_timings = {}
self._failed_count_map = {}
self._PASS_count = 0
self._DISABLED_count = 0
self._FLAKY_count = 0
self._FAILS_count = 0
self._fixable_count = 0
def test_strip_json_wrapper(self):
json = "['contents']"
self.assertEqual(json_results_generator.strip_json_wrapper(json_results_generator._JSON_PREFIX + json + json_results_generator._JSON_SUFFIX), json)
self.assertEqual(json_results_generator.strip_json_wrapper(json), json)
def _find_test_in_trie(self, path, trie):
nodes = path.split("/")
sub_trie = trie
for node in nodes:
self.assertIn(node, sub_trie)
sub_trie = sub_trie[node]
return sub_trie
def test_test_timings_trie(self):
individual_test_timings = []
individual_test_timings.append(json_results_generator.TestResult('foo/bar/baz.html', elapsed_time=1.2))
individual_test_timings.append(json_results_generator.TestResult('bar.html', elapsed_time=0.0001))
trie = json_results_generator.test_timings_trie(individual_test_timings)
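        # test_timings_trie stores times in integer milliseconds keyed by path
        # segment, so 1.2s becomes 1200 and 0.0001s truncates to 0.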
expected_trie = {
'bar.html': 0,
'foo': {
'bar': {
'baz.html': 1200,
}
}
}
self.assertEqual(json.dumps(trie), json.dumps(expected_trie))
|
weimingtom/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/test/test_html.py
|
89
|
"""
Tests for the html module functions.
"""
import html
import unittest
from test.support import run_unittest
class HtmlTests(unittest.TestCase):
def test_escape(self):
self.assertEqual(
html.escape('\'<script>"&foo;"</script>\''),
            '&#x27;&lt;script&gt;&quot;&amp;foo;&quot;&lt;/script&gt;&#x27;')
self.assertEqual(
html.escape('\'<script>"&foo;"</script>\'', False),
            '\'&lt;script&gt;"&amp;foo;"&lt;/script&gt;\'')
def test_main():
run_unittest(HtmlTests)
if __name__ == '__main__':
test_main()
|
davidmueller13/Vindicator-flo-aosp
|
refs/heads/master
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py
|
12527
|
# Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000
def avg(total, n):
return total / n
def nsecs(secs, nsecs):
return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
return nsecs % NSECS_PER_SEC
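# Worked example of the conversions above: nsecs(1, 500000000) == 1500000000,
# nsecs_secs(1500000000) == 1 and nsecs_nsecs(1500000000) == 500000000.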
def nsecs_str(nsecs):
str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs)),
return str
def add_stats(dict, key, value):
if not dict.has_key(key):
dict[key] = (value, value, value, 1)
else:
min, max, avg, count = dict[key]
if value < min:
min = value
if value > max:
max = value
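		# Note: this "avg" is a pairwise running smoothing of the previous
		# average with the new value, not a true arithmetic mean.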
avg = (avg + value) / 2
dict[key] = (min, max, avg, count + 1)
def clear_term():
print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
import audit
machine_to_id = {
'x86_64': audit.MACH_86_64,
'alpha' : audit.MACH_ALPHA,
'ia64' : audit.MACH_IA64,
'ppc' : audit.MACH_PPC,
'ppc64' : audit.MACH_PPC64,
's390' : audit.MACH_S390,
's390x' : audit.MACH_S390X,
'i386' : audit.MACH_X86,
'i586' : audit.MACH_X86,
'i686' : audit.MACH_X86,
}
try:
machine_to_id['armeb'] = audit.MACH_ARMEB
except:
pass
machine_id = machine_to_id[os.uname()[4]]
except:
if not audit_package_warned:
audit_package_warned = True
print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
try:
return audit.audit_syscall_to_name(id, machine_id)
except:
return str(id)
def strerror(nr):
try:
return errno.errorcode[abs(nr)]
except:
return "Unknown %d errno" % nr
|
LynxyssCZ/Flexget
|
refs/heads/develop
|
flexget/api/core/tasks.py
|
2
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import argparse
import cgi
import copy
from datetime import datetime, timedelta
from json import JSONEncoder
from flask import jsonify, Response, request
from flask_restplus import inputs
from queue import Queue, Empty
from flexget.api import api, APIResource
from flexget.api.app import APIError, NotFoundError, Conflict, BadRequest, success_response, \
base_message_schema, etag
from flexget.config_schema import process_config
from flexget.entry import Entry
from flexget.event import event
from flexget.options import get_parser
from flexget.task import task_phases
from flexget.utils import json
from flexget.utils import requests
from flexget.utils.lazy_dict import LazyLookup
# Tasks API
tasks_api = api.namespace('tasks', description='Manage Tasks')
class ObjectsContainer(object):
tasks_list_object = {'oneOf': [
{'type': 'array',
'items': {'$ref': '#/definitions/tasks.task'}},
{'type': 'array', 'items': {'type': 'string'}}
]
}
task_input_object = {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'config': {'$ref': '/schema/plugins'}
},
'required': ['name', 'config'],
'additionalProperties': False
}
task_return_object = copy.deepcopy(task_input_object)
task_return_object['properties']['config'] = {'type': 'object'}
task_queue_schema = {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'id': {'type': 'integer'},
'name': {'type': 'string'},
'current_phase': {'type': ['string', 'null']},
'current_plugin': {'type': ['string', 'null']},
}}
}
task_execution_results_schema = {
'type': 'object',
'properties': {
'task': {
'type': 'object',
'properties': {
'id': {'type': 'integer'},
'name': {'type': 'string'},
'stream': {
'type': 'array',
'items': {
'progress': {
'type': 'object',
'properties': {
'status': {'type': 'string', 'enum': ['pending', 'running', 'complete']},
'phase': {'type': 'string', 'enum': task_phases},
'plugin': {'type': 'string'},
                        'percent': {'type': 'number'}
}
},
'summary': {
'type': 'object',
'properties': {
'accepted': {'type': 'integer'},
'rejected': {'type': 'integer'},
'failed': {'type': 'integer'},
'undecided': {'type': 'integer'},
'aborted': {'type': 'boolean'},
'abort_reason': {'type': 'string'},
}
},
'entry_dump': {'type': 'array', 'items': {'type': 'object'}},
'log': {'type': 'string'}
}
}
}
}
}
}
inject_input = {
'type': 'object',
'properties': {
'title': {'type': 'string',
                  'description': 'Title of the entry. If not supplied, an attempt will be made to '
                                 'retrieve it from the URL headers'},
'url': {'type': 'string',
'format': 'url',
'description': 'URL of the entry'},
'force': {'type': 'boolean',
'description': 'Prevent any plugins from rejecting this entry'},
'accept': {'type': 'boolean',
'description': 'Accept this entry immediately upon injection (disregard task filters)'},
'fields': {'type': 'object',
                   'description': 'An object of arbitrary additional fields to set on the entry'}
},
'required': ['url']
}
task_execution_input = {
'type': 'object',
'properties': {
'tasks': {'type': 'array',
'items': {'type': 'string'},
'minItems': 1,
'uniqueItems': True},
'progress': {
'type': 'boolean',
'default': True,
'description': 'Include task progress updates'},
'summary': {
'type': 'boolean',
'default': True,
'description': 'Include task summary'},
'entry_dump': {
'type': 'boolean',
'default': True,
'description': 'Include dump of entries including fields'},
'inject': {'type': 'array',
'items': inject_input,
'description': 'A List of entry objects'},
'loglevel': {'type': 'string',
'description': 'Specify log level',
'enum': ['critical', 'error', 'warning', 'info', 'verbose', 'debug', 'trace']}
},
'required': ['tasks']
}
params_return_schema = {'type': 'array', 'items': {'type': 'object'}}
tasks_list_schema = api.schema_model('tasks.list', ObjectsContainer.tasks_list_object)
task_input_schema = api.schema_model('tasks.task', ObjectsContainer.task_input_object)
task_return_schema = api.schema_model('tasks.task', ObjectsContainer.task_return_object)
task_api_queue_schema = api.schema_model('task.queue', ObjectsContainer.task_queue_schema)
task_api_execute_schema = api.schema_model('task.execution', ObjectsContainer.task_execution_results_schema)
task_execution_schema = api.schema_model('task_execution_input', ObjectsContainer.task_execution_input)
task_execution_params = api.schema_model('tasks.execution_params', ObjectsContainer.params_return_schema)
task_api_desc = 'Task config schema is too large to display; you can view it using the schema API'
tasks_parser = api.parser()
tasks_parser.add_argument('include_config', type=inputs.boolean, default=True, help='Include task config')
@tasks_api.route('/')
@api.doc(description=task_api_desc)
class TasksAPI(APIResource):
@etag
@api.response(200, model=tasks_list_schema)
@api.doc(parser=tasks_parser)
def get(self, session=None):
""" List all tasks """
active_tasks = {task: task_data for task, task_data in self.manager.user_config.get('tasks', {}).items()
if not task.startswith('_')}
args = tasks_parser.parse_args()
if not args.get('include_config'):
return jsonify(list(active_tasks))
tasks = [{'name': name, 'config': config} for name, config in active_tasks.items()]
return jsonify(tasks)
@api.validate(task_input_schema, description='New task object')
@api.response(201, description='Newly created task', model=task_return_schema)
@api.response(Conflict)
@api.response(APIError)
def post(self, session=None):
""" Add new task """
data = request.json
task_name = data['name']
if task_name in self.manager.user_config.get('tasks', {}):
raise Conflict('task already exists')
if 'tasks' not in self.manager.user_config:
self.manager.user_config['tasks'] = {}
if 'tasks' not in self.manager.config:
self.manager.config['tasks'] = {}
task_schema_processed = copy.deepcopy(data)
errors = process_config(task_schema_processed, schema=task_input_schema.__schema__, set_defaults=True)
if errors:
            raise APIError('problem loading config; please file a bug report, as this should not happen!')
self.manager.user_config['tasks'][task_name] = data['config']
self.manager.config['tasks'][task_name] = task_schema_processed['config']
self.manager.save_config()
self.manager.config_changed()
rsp = jsonify({'name': task_name, 'config': self.manager.user_config['tasks'][task_name]})
rsp.status_code = 201
return rsp
@tasks_api.route('/<task>/')
@api.doc(params={'task': 'task name'}, description=task_api_desc)
@api.response(APIError, description='unable to read config')
class TaskAPI(APIResource):
@etag
@api.response(200, model=task_return_schema)
@api.response(NotFoundError, description='task not found')
def get(self, task, session=None):
""" Get task config """
if task not in self.manager.user_config.get('tasks', {}):
raise NotFoundError('task `%s` not found' % task)
return jsonify({'name': task, 'config': self.manager.user_config['tasks'][task]})
@api.validate(task_input_schema)
@api.response(200, model=task_return_schema)
@api.response(NotFoundError)
@api.response(BadRequest)
def put(self, task, session=None):
""" Update tasks config """
data = request.json
new_task_name = data['name']
if task not in self.manager.user_config.get('tasks', {}):
raise NotFoundError('task `%s` not found' % task)
if 'tasks' not in self.manager.user_config:
self.manager.user_config['tasks'] = {}
if 'tasks' not in self.manager.config:
self.manager.config['tasks'] = {}
if task != new_task_name:
# Rename task
if new_task_name in self.manager.user_config['tasks']:
                raise BadRequest('cannot rename task as it already exists')
del self.manager.user_config['tasks'][task]
del self.manager.config['tasks'][task]
# Process the task config
task_schema_processed = copy.deepcopy(data)
errors = process_config(task_schema_processed, schema=task_return_schema.__schema__, set_defaults=True)
if errors:
            raise APIError('problem loading config; please file a bug report, as this should not happen!')
self.manager.user_config['tasks'][new_task_name] = data['config']
self.manager.config['tasks'][new_task_name] = task_schema_processed['config']
self.manager.save_config()
self.manager.config_changed()
rsp = jsonify({'name': new_task_name, 'config': self.manager.user_config['tasks'][new_task_name]})
rsp.status_code = 200
return rsp
@api.response(200, model=base_message_schema, description='deleted task')
@api.response(NotFoundError)
def delete(self, task, session=None):
""" Delete a task """
try:
self.manager.config['tasks'].pop(task)
self.manager.user_config['tasks'].pop(task)
except KeyError:
raise NotFoundError('task does not exist')
self.manager.save_config()
self.manager.config_changed()
return success_response('successfully deleted task')
default_start_date = (datetime.now() - timedelta(weeks=1)).strftime('%Y-%m-%d')
status_parser = api.parser()
status_parser.add_argument('succeeded', type=inputs.boolean, default=True, help='Filter by success status')
status_parser.add_argument('produced', type=inputs.boolean, default=True, store_missing=False,
help='Filter by tasks that produced entries')
status_parser.add_argument('start_date', type=inputs.datetime_from_iso8601, default=default_start_date,
help='Filter by minimal start date. Example: \'2012-01-01\'')
status_parser.add_argument('end_date', type=inputs.datetime_from_iso8601,
help='Filter by maximal end date. Example: \'2012-01-01\'')
status_parser.add_argument('limit', default=100, type=int,
help='Limit return of executions per task, as that number can be huge')
def _task_info_dict(task):
return {
'id': int(task.id),
'name': task.name,
'current_phase': task.current_phase,
'current_plugin': task.current_plugin,
}
@tasks_api.route('/queue/')
class TaskQueueAPI(APIResource):
@api.response(200, model=task_api_queue_schema)
def get(self, session=None):
""" List task(s) in queue for execution """
tasks = [_task_info_dict(task) for task in self.manager.task_queue.run_queue.queue]
if self.manager.task_queue.current_task:
tasks.insert(0, _task_info_dict(self.manager.task_queue.current_task))
return jsonify(tasks)
class ExecuteLog(Queue):
""" Supports task log streaming by acting like a file object """
def write(self, s):
self.put(json.dumps({'log': s}))
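    # Illustrative: when an ExecuteLog is passed as `output` to
    # manager.execute(), each log line the task writes arrives on the queue
    # as a JSON chunk, e.g. write('task started') enqueues
    # '{"log": "task started"}'.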
_streams = {}
# Another namespace for the same endpoint
inject_api = api.namespace('inject', description='Entry injection API')
@inject_api.route('/params/')
@tasks_api.route('/execute/params/')
@api.doc(description='Available payload parameters for task execute')
class TaskExecutionParams(APIResource):
@etag(cache_age=3600)
@api.response(200, model=task_execution_params)
def get(self, session=None):
""" Execute payload parameters """
return jsonify(ObjectsContainer.task_execution_input)
@inject_api.route('/')
@tasks_api.route('/execute/')
@api.doc(description='For details on available parameters query /params/ endpoint')
class TaskExecutionAPI(APIResource):
@api.response(NotFoundError)
@api.response(BadRequest)
@api.response(200, model=task_api_execute_schema)
@api.validate(task_execution_schema)
def post(self, session=None):
""" Execute task and stream results """
data = request.json
for task in data.get('tasks'):
if task.lower() not in [t.lower() for t in self.manager.user_config.get('tasks', {}).keys()]:
raise NotFoundError('task %s does not exist' % task)
queue = ExecuteLog()
output = queue if data.get('loglevel') else None
        stream = any(
            name in ('progress', 'summary', 'loglevel', 'entry_dump') for name, value in data.items() if value)
loglevel = data.pop('loglevel', None)
        # This emulates the CLI command of using `--now` and `--no-cache`
options = {'interval_ignore': data.pop('now', None),
'nocache': data.pop('no_cache', None),
'allow_manual': True}
for option, value in data.items():
options[option] = value
if data.get('inject'):
entries = []
for item in data.get('inject'):
entry = Entry()
entry['url'] = item['url']
if not item.get('title'):
try:
value, params = cgi.parse_header(requests.head(item['url']).headers['Content-Disposition'])
entry['title'] = params['filename']
except KeyError:
raise BadRequest('No title given, and couldn\'t get one from the URL\'s HTTP response')
else:
entry['title'] = item.get('title')
if item.get('force'):
entry['immortal'] = True
if item.get('accept'):
entry.accept(reason='accepted by API inject')
if item.get('fields'):
for key, value in item.get('fields').items():
entry[key] = value
entries.append(entry)
options['inject'] = entries
executed_tasks = self.manager.execute(options=options, output=output, loglevel=loglevel)
tasks_queued = []
for task_id, task_name, task_event in executed_tasks:
tasks_queued.append({'id': task_id, 'name': task_name, 'event': task_event})
_streams[task_id] = {
'queue': queue,
'last_update': datetime.now(),
'args': data
}
if not stream:
return jsonify({'tasks': [{'id': task['id'], 'name': task['name']} for task in tasks_queued]})
def stream_response():
# First return the tasks to execute
yield '{"stream": ['
yield json.dumps({'tasks': [{'id': task['id'], 'name': task['name']} for task in tasks_queued]}) + ',\n'
while True:
try:
yield queue.get(timeout=1) + ',\n'
continue
except Empty:
pass
if queue.empty() and all([task['event'].is_set() for task in tasks_queued]):
for task in tasks_queued:
del _streams[task['id']]
break
yield '{}]}'
return Response(stream_response(), mimetype='text/event-stream')
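# A hypothetical client call for the endpoint above (the host, port and URL
# prefix are assumptions; the body must satisfy
# ObjectsContainer.task_execution_input):
#   curl -X POST http://localhost:5050/api/tasks/execute/ \
#        -H 'Content-Type: application/json' \
#        -d '{"tasks": ["my-task"], "entry_dump": false}'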
@event('manager.daemon.started')
def setup_params(mgr):
parser = get_parser('execute')
for action in parser._optionals._actions:
# Ignore list for irrelevant actions
ignore = ['help', 'verbose', 'silent', 'try-regexp', 'dump-config', 'dump']
name = action.option_strings[-1].strip('--')
if name in ignore or action.help == '==SUPPRESS==':
continue
name = name.replace('-', '_')
property_data = {'description': action.help.capitalize()}
if isinstance(action, argparse._StoreConstAction):
property_data['type'] = 'boolean'
elif isinstance(action, argparse._StoreAction):
if action.nargs in ['+', '*']:
property_data['type'] = 'array'
property_data['items'] = {'type': 'string'}
property_data['minItems'] = 1
else:
property_data['type'] = 'string'
else:
# Unknown actions should not be added to schema
property_data = None
        # Some options may be pre-added to the schema with additional options; don't override them
if property_data and name not in ObjectsContainer.task_execution_input['properties']:
ObjectsContainer.task_execution_input['properties'][name] = property_data
ObjectsContainer.task_execution_input['additionalProperties'] = False
class EntryDecoder(JSONEncoder):
def default(self, o):
if isinstance(o, LazyLookup):
return '<LazyField>'
try:
return JSONEncoder.default(self, o)
except TypeError:
return str(o)
_phase_percents = {
'input': 5,
'metainfo': 10,
'filter': 30,
'download': 40,
'modify': 65,
'output': 75,
'exit': 100,
}
def update_stream(task, status='pending'):
if task.current_phase in _phase_percents:
task.stream['percent'] = _phase_percents[task.current_phase]
progress = {
'status': status,
'phase': task.current_phase,
'plugin': task.current_plugin,
'percent': task.stream.get('percent', 0)
}
task.stream['queue'].put(json.dumps({'progress': progress}))
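# Illustrative: during the filter phase update_stream emits a chunk like
# '{"progress": {"status": "running", "phase": "filter", "plugin": "regexp", "percent": 30}}'
# where "regexp" stands in for whatever plugin is currently executing.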
@event('task.execute.started')
def start_task(task):
task.stream = _streams.get(task.id)
if task.stream and task.stream['args'].get('progress'):
update_stream(task, status='running')
@event('task.execute.completed')
def finish_task(task):
if task.stream:
if task.stream['args'].get('progress'):
update_stream(task, status='complete')
if task.stream['args'].get('entry_dump'):
entries = [entry.store for entry in task.entries]
task.stream['queue'].put(EntryDecoder().encode({'entry_dump': entries}))
if task.stream['args'].get('summary'):
task.stream['queue'].put(json.dumps({
'summary': {
'accepted': len(task.accepted),
'rejected': len(task.rejected),
'failed': len(task.failed),
'undecided': len(task.undecided),
'aborted': task.aborted,
'abort_reason': task.abort_reason,
}
}))
@event('task.execute.before_plugin')
def track_progress(task, plugin_name):
if task.stream and task.stream['args'].get('progress'):
update_stream(task, status='running')
|
mrkm4ntr/incubator-airflow
|
refs/heads/master
|
airflow/contrib/sensors/gcs_sensor.py
|
7
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs`."""
import warnings
from airflow.providers.google.cloud.sensors.gcs import (
GCSObjectExistenceSensor,
GCSObjectsWtihPrefixExistenceSensor,
GCSObjectUpdateSensor,
GCSUploadSessionCompleteSensor,
)
warnings.warn(
"This module is deprecated. Please use `airflow.providers.google.cloud.sensors.gcs`.",
DeprecationWarning,
stacklevel=2,
)
class GoogleCloudStorageObjectSensor(GCSObjectExistenceSensor):
"""
This class is deprecated.
Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor`.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor`.""",
DeprecationWarning,
stacklevel=3,
)
super().__init__(*args, **kwargs)
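# Illustrative: existing DAGs importing the old name keep working, e.g.
#   from airflow.contrib.sensors.gcs_sensor import GoogleCloudStorageObjectSensor
# resolves to a GCSObjectExistenceSensor subclass and emits a
# DeprecationWarning when instantiated.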
class GoogleCloudStorageObjectUpdatedSensor(GCSObjectUpdateSensor):
"""
This class is deprecated.
Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor`.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor`.""",
DeprecationWarning,
stacklevel=3,
)
super().__init__(*args, **kwargs)
class GoogleCloudStoragePrefixSensor(GCSObjectsWtihPrefixExistenceSensor):
"""
This class is deprecated.
Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor`.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.google.cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor`.""",
DeprecationWarning,
stacklevel=3,
)
super().__init__(*args, **kwargs)
class GoogleCloudStorageUploadSessionCompleteSensor(GCSUploadSessionCompleteSensor):
"""
This class is deprecated.
Please use `airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor`.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor`.""",
DeprecationWarning,
stacklevel=3,
)
super().__init__(*args, **kwargs)
|
OLIMEX/linux-3.12.10-ti2013.12.01-am3352_som
|
refs/heads/master
|
Documentation/target/tcm_mod_builder.py
|
88
|
#!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
	if os.path.isdir(fabric_mod_dir_var):
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
	p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
	p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
	p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
	p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (kstrtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
	process_fo = 0
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
			process_fo = 1
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
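# Illustration (hypothetical helper, not called anywhere in this script): the
# pattern collected above matches C function-pointer members of
# target_core_fabric_ops, e.g.
#	int (*write_pending)(struct se_cmd *);
def _example_is_fabric_op(line):
	# True for function-pointer declarations such as the one shown above.
	return bool(re.search(r'\(\*', line))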
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
	try:
		# open() raises IOError on failure; it never returns a false value.
		p = open(f, 'w')
	except IOError:
		tcm_mod_err("Unable to open file: " + f)
	fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
	print "Writing file: " + fi
	try:
		pi = open(fi, 'w')
	except IOError:
		tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
	for fo in fabric_ops:
		# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	try:
		pi.write(bufi)
	except IOError:
		tcm_mod_err("Unable to write fi: " + fi)
	pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
	try:
		p = open(f, 'w')
	except IOError:
		tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
	try:
		p = open(f, 'w')
	except IOError:
		tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
return
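# For a module named "tcm_nab5000" (hypothetical name), the stanza generated
# above reads:
#
#	config TCM_NAB5000
#	tristate "TCM_NAB5000 fabric module"
#	depends on TARGET_CORE && CONFIGFS_FS
#	default n
#	---help---
#	Say Y here to enable the TCM_NAB5000 fabric module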
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
	tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
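# Typical invocation, run from two directories below the kernel tree top
# (the script builds tcm_dir from cwd + "/../../" above); module name here
# is hypothetical:
#	python tcm_mod_builder.py -m tcm_nab5000 -p iSCSI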
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
|
slevenhagen/odoo
|
refs/heads/8.0
|
addons/hw_scale/__openerp__.py
|
220
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Weighing Scale Hardware Driver',
'version': '1.0',
'category': 'Hardware Drivers',
'sequence': 6,
    'summary': 'Hardware Driver for Weighing Scales',
'website': 'https://www.odoo.com/page/point-of-sale',
'description': """
Weighing Scale Hardware Driver
==============================
This module allows the point of sale to connect to a scale using a USB HSM Serial Scale Interface,
such as the Mettler Toledo Ariva.
""",
'author': 'OpenERP SA',
'depends': ['hw_proxy'],
'external_dependencies': {'python': ['serial']},
'test': [
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
c0defreak/python-for-android
|
refs/heads/master
|
python-modules/twisted/twisted/trial/test/mockcustomsuite.py
|
87
|
# Copyright (c) 2006 Twisted Matrix Laboratories. See LICENSE for details
"""
Mock test module that contains a C{test_suite} method. L{runner.TestLoader}
should load the tests from the C{test_suite}, not from the C{Foo} C{TestCase}.
See L{twisted.trial.test.test_loader.LoaderTest.test_loadModuleWith_test_suite}.
"""
from twisted.trial import unittest, runner
class Foo(unittest.TestCase):
def test_foo(self):
pass
def test_suite():
ts = runner.TestSuite()
ts.name = "MyCustomSuite"
return ts
|
laiqiqi886/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/xmlrpc/client.py
|
69
|
#
# XML-RPC CLIENT LIBRARY
# $Id$
#
# an XML-RPC client interface for Python.
#
# the marshalling and response parser code can also be used to
# implement XML-RPC servers.
#
# Notes:
# this version is designed to work with Python 2.1 or newer.
#
# History:
# 1999-01-14 fl Created
# 1999-01-15 fl Changed dateTime to use localtime
# 1999-01-16 fl Added Binary/base64 element, default to RPC2 service
# 1999-01-19 fl Fixed array data element (from Skip Montanaro)
# 1999-01-21 fl Fixed dateTime constructor, etc.
# 1999-02-02 fl Added fault handling, handle empty sequences, etc.
# 1999-02-10 fl Fixed problem with empty responses (from Skip Montanaro)
# 1999-06-20 fl Speed improvements, pluggable parsers/transports (0.9.8)
# 2000-11-28 fl Changed boolean to check the truth value of its argument
# 2001-02-24 fl Added encoding/Unicode/SafeTransport patches
# 2001-02-26 fl Added compare support to wrappers (0.9.9/1.0b1)
# 2001-03-28 fl Make sure response tuple is a singleton
# 2001-03-29 fl Don't require empty params element (from Nicholas Riley)
# 2001-06-10 fl Folded in _xmlrpclib accelerator support (1.0b2)
# 2001-08-20 fl Base xmlrpclib.Error on built-in Exception (from Paul Prescod)
# 2001-09-03 fl Allow Transport subclass to override getparser
# 2001-09-10 fl Lazy import of urllib, cgi, xmllib (20x import speedup)
# 2001-10-01 fl Remove containers from memo cache when done with them
# 2001-10-01 fl Use faster escape method (80% dumps speedup)
# 2001-10-02 fl More dumps microtuning
# 2001-10-04 fl Make sure import expat gets a parser (from Guido van Rossum)
# 2001-10-10 sm Allow long ints to be passed as ints if they don't overflow
# 2001-10-17 sm Test for int and long overflow (allows use on 64-bit systems)
# 2001-11-12 fl Use repr() to marshal doubles (from Paul Felix)
# 2002-03-17 fl Avoid buffered read when possible (from James Rucker)
# 2002-04-07 fl Added pythondoc comments
# 2002-04-16 fl Added __str__ methods to datetime/binary wrappers
# 2002-05-15 fl Added error constants (from Andrew Kuchling)
# 2002-06-27 fl Merged with Python CVS version
# 2002-10-22 fl Added basic authentication (based on code from Phillip Eby)
# 2003-01-22 sm Add support for the bool type
# 2003-02-27 gvr Remove apply calls
# 2003-04-24 sm Use cStringIO if available
# 2003-04-25 ak Add support for nil
# 2003-06-15 gn Add support for time.struct_time
# 2003-07-12 gp Correct marshalling of Faults
# 2003-10-31 mvl Add multicall support
# 2004-08-20 mvl Bump minimum supported Python version to 2.1
#
# Copyright (c) 1999-2002 by Secret Labs AB.
# Copyright (c) 1999-2002 by Fredrik Lundh.
#
# info@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The XML-RPC client interface is
#
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
"""
An XML-RPC client interface for Python.
The marshalling and response parser code can also be used to
implement XML-RPC servers.
Exported exceptions:
Error Base class for client errors
ProtocolError Indicates an HTTP protocol error
ResponseError Indicates a broken response package
Fault Indicates an XML-RPC fault package
Exported classes:
ServerProxy Represents a logical connection to an XML-RPC server
MultiCall Executor of boxcared xmlrpc requests
DateTime dateTime wrapper for an ISO 8601 string or time tuple or
localtime integer value to generate a "dateTime.iso8601"
XML-RPC value
Binary binary data wrapper
Marshaller Generate an XML-RPC params chunk from a Python data structure
Unmarshaller Unmarshal an XML-RPC response from incoming XML event message
Transport Handles an HTTP transaction to an XML-RPC server
SafeTransport Handles an HTTPS transaction to an XML-RPC server
Exported constants:
(none)
Exported functions:
getparser Create instance of the fastest available parser & attach
to an unmarshalling object
dumps Convert an argument tuple or a Fault instance to an XML-RPC
request (or response, if the methodresponse option is used).
loads Convert an XML-RPC packet to unmarshalled data plus a method
name (None if not present).
"""
import base64
import sys
import time
from datetime import datetime
import http.client
import urllib.parse
from xml.parsers import expat
import errno
from io import BytesIO
try:
import gzip
except ImportError:
gzip = None #python can be built without zlib/gzip support
# --------------------------------------------------------------------
# Internal stuff
def escape(s):
    # Quote the three characters that XML treats specially in text content.
    s = s.replace("&", "&amp;")
    s = s.replace("<", "&lt;")
    return s.replace(">", "&gt;")
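# For example, escape("a<b&c") returns "a&lt;b&amp;c"; only these three
# characters need quoting inside XML-RPC text content.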
# used in User-Agent header sent
__version__ = sys.version[:3]
# xmlrpc integer limits
MAXINT = 2**31-1
MININT = -2**31
# --------------------------------------------------------------------
# Error constants (from Dan Libby's specification at
# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php)
# Ranges of errors
PARSE_ERROR = -32700
SERVER_ERROR = -32600
APPLICATION_ERROR = -32500
SYSTEM_ERROR = -32400
TRANSPORT_ERROR = -32300
# Specific errors
NOT_WELLFORMED_ERROR = -32700
UNSUPPORTED_ENCODING = -32701
INVALID_ENCODING_CHAR = -32702
INVALID_XMLRPC = -32600
METHOD_NOT_FOUND = -32601
INVALID_METHOD_PARAMS = -32602
INTERNAL_ERROR = -32603
# --------------------------------------------------------------------
# Exceptions
##
# Base class for all kinds of client-side errors.
class Error(Exception):
"""Base class for client errors."""
def __str__(self):
return repr(self)
##
# Indicates an HTTP-level protocol error. This is raised by the HTTP
# transport layer, if the server returns an error code other than 200
# (OK).
#
# @param url The target URL.
# @param errcode The HTTP error code.
# @param errmsg The HTTP error message.
# @param headers The HTTP header dictionary.
class ProtocolError(Error):
"""Indicates an HTTP protocol error."""
def __init__(self, url, errcode, errmsg, headers):
Error.__init__(self)
self.url = url
self.errcode = errcode
self.errmsg = errmsg
self.headers = headers
def __repr__(self):
return (
"<ProtocolError for %s: %s %s>" %
(self.url, self.errcode, self.errmsg)
)
##
# Indicates a broken XML-RPC response package. This exception is
# raised by the unmarshalling layer, if the XML-RPC response is
# malformed.
class ResponseError(Error):
"""Indicates a broken response package."""
pass
##
# Indicates an XML-RPC fault response package. This exception is
# raised by the unmarshalling layer, if the XML-RPC response contains
# a fault string. This exception can also be used as a class, to
# generate a fault XML-RPC message.
#
# @param faultCode The XML-RPC fault code.
# @param faultString The XML-RPC fault string.
class Fault(Error):
"""Indicates an XML-RPC fault package."""
def __init__(self, faultCode, faultString, **extra):
Error.__init__(self)
self.faultCode = faultCode
self.faultString = faultString
def __repr__(self):
return "<Fault %s: %r>" % (self.faultCode, self.faultString)
# --------------------------------------------------------------------
# Special values
##
# Backwards compatibility
boolean = Boolean = bool
##
# Wrapper for XML-RPC DateTime values. This converts a time value to
# the format used by XML-RPC.
# <p>
# The value can be given as a datetime object, as a string in the
# format "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by
# time.localtime()), or an integer value (as returned by time.time()).
# The wrapper uses time.localtime() to convert an integer to a time
# tuple.
#
# @param value The time, given as a datetime object, an ISO 8601 string,
# a time tuple, or an integer time value.
# Issue #13305: different format codes across platforms
_day0 = datetime(1, 1, 1)
if _day0.strftime('%Y') == '0001': # Mac OS X
def _iso8601_format(value):
return value.strftime("%Y%m%dT%H:%M:%S")
elif _day0.strftime('%4Y') == '0001': # Linux
def _iso8601_format(value):
return value.strftime("%4Y%m%dT%H:%M:%S")
else:
def _iso8601_format(value):
return value.strftime("%Y%m%dT%H:%M:%S").zfill(17)
del _day0
def _strftime(value):
if isinstance(value, datetime):
return _iso8601_format(value)
if not isinstance(value, (tuple, time.struct_time)):
if value == 0:
value = time.time()
value = time.localtime(value)
return "%04d%02d%02dT%02d:%02d:%02d" % value[:6]
class DateTime:
"""DateTime wrapper for an ISO 8601 string or time tuple or
localtime integer value to generate 'dateTime.iso8601' XML-RPC
value.
"""
def __init__(self, value=0):
if isinstance(value, str):
self.value = value
else:
self.value = _strftime(value)
def make_comparable(self, other):
if isinstance(other, DateTime):
s = self.value
o = other.value
elif isinstance(other, datetime):
s = self.value
o = _iso8601_format(other)
elif isinstance(other, str):
s = self.value
o = other
elif hasattr(other, "timetuple"):
s = self.timetuple()
o = other.timetuple()
else:
otype = (hasattr(other, "__class__")
and other.__class__.__name__
or type(other))
raise TypeError("Can't compare %s and %s" %
(self.__class__.__name__, otype))
return s, o
def __lt__(self, other):
s, o = self.make_comparable(other)
return s < o
def __le__(self, other):
s, o = self.make_comparable(other)
return s <= o
def __gt__(self, other):
s, o = self.make_comparable(other)
return s > o
def __ge__(self, other):
s, o = self.make_comparable(other)
return s >= o
def __eq__(self, other):
s, o = self.make_comparable(other)
return s == o
def __ne__(self, other):
s, o = self.make_comparable(other)
return s != o
def timetuple(self):
return time.strptime(self.value, "%Y%m%dT%H:%M:%S")
##
# Get date/time value.
#
# @return Date/time value, as an ISO 8601 string.
def __str__(self):
return self.value
def __repr__(self):
return "<DateTime %r at %x>" % (self.value, id(self))
def decode(self, data):
self.value = str(data).strip()
def encode(self, out):
out.write("<value><dateTime.iso8601>")
out.write(self.value)
out.write("</dateTime.iso8601></value>\n")
def _datetime(data):
# decode xml element contents into a DateTime structure.
value = DateTime()
value.decode(data)
return value
def _datetime_type(data):
return datetime.strptime(data, "%Y%m%dT%H:%M:%S")
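# A minimal usage sketch (hypothetical helper, not part of this module):
# DateTime normalizes datetime objects, time tuples and integers into one
# comparable ISO 8601 string form.
def _datetime_demo():
    dt = DateTime(datetime(2002, 10, 2, 8, 0))
    assert str(dt) == "20021002T08:00:00"
    assert dt == "20021002T08:00:00"          # compares against plain strings
    assert dt < DateTime("20030101T00:00:00")  # and against other wrappers
    return dt.timetuple()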
##
# Wrapper for binary data. This can be used to transport any kind
# of binary data over XML-RPC, using BASE64 encoding.
#
# @param data An 8-bit string containing arbitrary data.
class Binary:
"""Wrapper for binary data."""
def __init__(self, data=None):
if data is None:
data = b""
else:
if not isinstance(data, (bytes, bytearray)):
raise TypeError("expected bytes or bytearray, not %s" %
data.__class__.__name__)
data = bytes(data) # Make a copy of the bytes!
self.data = data
##
# Get buffer contents.
#
# @return Buffer contents, as an 8-bit string.
def __str__(self):
return str(self.data, "latin-1") # XXX encoding?!
def __eq__(self, other):
if isinstance(other, Binary):
other = other.data
return self.data == other
def __ne__(self, other):
if isinstance(other, Binary):
other = other.data
return self.data != other
def decode(self, data):
self.data = base64.decodebytes(data)
def encode(self, out):
out.write("<value><base64>\n")
encoded = base64.encodebytes(self.data)
out.write(encoded.decode('ascii'))
out.write("</base64></value>\n")
def _binary(data):
# decode xml element contents into a Binary structure
value = Binary()
value.decode(data)
return value
WRAPPERS = (DateTime, Binary)
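# A small round-trip sketch (hypothetical helper, not part of this module):
# Binary carries raw bytes over XML-RPC as base64 text.
def _binary_demo():
    original = Binary(b"\x00\xffraw payload")
    copy = Binary()
    copy.decode(base64.encodebytes(original.data))  # decode() expects base64 bytes
    return copy == original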
# --------------------------------------------------------------------
# XML parsers
class ExpatParser:
# fast expat parser for Python 2.0 and later.
def __init__(self, target):
self._parser = parser = expat.ParserCreate(None, None)
self._target = target
parser.StartElementHandler = target.start
parser.EndElementHandler = target.end
parser.CharacterDataHandler = target.data
encoding = None
target.xml(encoding, None)
def feed(self, data):
self._parser.Parse(data, 0)
def close(self):
self._parser.Parse("", 1) # end of data
del self._target, self._parser # get rid of circular references
# --------------------------------------------------------------------
# XML-RPC marshalling and unmarshalling code
##
# XML-RPC marshaller.
#
# @param encoding Default encoding for 8-bit strings. The default
# value is None (interpreted as UTF-8).
# @see dumps
class Marshaller:
"""Generate an XML-RPC params chunk from a Python data structure.
Create a Marshaller instance for each set of parameters, and use
the "dumps" method to convert your data (represented as a tuple)
to an XML-RPC params chunk. To write a fault response, pass a
Fault instance instead. You may prefer to use the "dumps" module
function for this purpose.
"""
# by the way, if you don't understand what's going on in here,
# that's perfectly ok.
def __init__(self, encoding=None, allow_none=False):
self.memo = {}
self.data = None
self.encoding = encoding
self.allow_none = allow_none
dispatch = {}
def dumps(self, values):
out = []
write = out.append
dump = self.__dump
if isinstance(values, Fault):
# fault instance
write("<fault>\n")
dump({'faultCode': values.faultCode,
'faultString': values.faultString},
write)
write("</fault>\n")
else:
# parameter block
# FIXME: the xml-rpc specification allows us to leave out
# the entire <params> block if there are no parameters.
# however, changing this may break older code (including
# old versions of xmlrpclib.py), so this is better left as
# is for now. See @XMLRPC3 for more information. /F
write("<params>\n")
for v in values:
write("<param>\n")
dump(v, write)
write("</param>\n")
write("</params>\n")
result = "".join(out)
return result
def __dump(self, value, write):
try:
f = self.dispatch[type(value)]
except KeyError:
# check if this object can be marshalled as a structure
if not hasattr(value, '__dict__'):
raise TypeError("cannot marshal %s objects" % type(value))
# check if this class is a sub-class of a basic type,
# because we don't know how to marshal these types
# (e.g. a string sub-class)
for type_ in type(value).__mro__:
if type_ in self.dispatch.keys():
raise TypeError("cannot marshal %s objects" % type(value))
# XXX(twouters): using "_arbitrary_instance" as key as a quick-fix
# for the p3yk merge, this should probably be fixed more neatly.
f = self.dispatch["_arbitrary_instance"]
f(self, value, write)
def dump_nil (self, value, write):
if not self.allow_none:
raise TypeError("cannot marshal None unless allow_none is enabled")
write("<value><nil/></value>")
dispatch[type(None)] = dump_nil
def dump_bool(self, value, write):
write("<value><boolean>")
write(value and "1" or "0")
write("</boolean></value>\n")
dispatch[bool] = dump_bool
def dump_long(self, value, write):
if value > MAXINT or value < MININT:
raise OverflowError("int exceeds XML-RPC limits")
write("<value><int>")
write(str(int(value)))
write("</int></value>\n")
dispatch[int] = dump_long
# backward compatible
dump_int = dump_long
def dump_double(self, value, write):
write("<value><double>")
write(repr(value))
write("</double></value>\n")
dispatch[float] = dump_double
def dump_unicode(self, value, write, escape=escape):
write("<value><string>")
write(escape(value))
write("</string></value>\n")
dispatch[str] = dump_unicode
def dump_bytes(self, value, write):
write("<value><base64>\n")
encoded = base64.encodebytes(value)
write(encoded.decode('ascii'))
write("</base64></value>\n")
dispatch[bytes] = dump_bytes
dispatch[bytearray] = dump_bytes
def dump_array(self, value, write):
i = id(value)
if i in self.memo:
raise TypeError("cannot marshal recursive sequences")
self.memo[i] = None
dump = self.__dump
write("<value><array><data>\n")
for v in value:
dump(v, write)
write("</data></array></value>\n")
del self.memo[i]
dispatch[tuple] = dump_array
dispatch[list] = dump_array
def dump_struct(self, value, write, escape=escape):
i = id(value)
if i in self.memo:
raise TypeError("cannot marshal recursive dictionaries")
self.memo[i] = None
dump = self.__dump
write("<value><struct>\n")
for k, v in value.items():
write("<member>\n")
if not isinstance(k, str):
raise TypeError("dictionary key must be string")
write("<name>%s</name>\n" % escape(k))
dump(v, write)
write("</member>\n")
write("</struct></value>\n")
del self.memo[i]
dispatch[dict] = dump_struct
def dump_datetime(self, value, write):
write("<value><dateTime.iso8601>")
write(_strftime(value))
write("</dateTime.iso8601></value>\n")
dispatch[datetime] = dump_datetime
def dump_instance(self, value, write):
# check for special wrappers
if value.__class__ in WRAPPERS:
self.write = write
value.encode(self)
del self.write
else:
# store instance attributes as a struct (really?)
self.dump_struct(value.__dict__, write)
dispatch[DateTime] = dump_instance
dispatch[Binary] = dump_instance
# XXX(twouters): using "_arbitrary_instance" as key as a quick-fix
# for the p3yk merge, this should probably be fixed more neatly.
dispatch["_arbitrary_instance"] = dump_instance
##
# XML-RPC unmarshaller.
#
# @see loads
class Unmarshaller:
"""Unmarshal an XML-RPC response, based on incoming XML event
messages (start, data, end). Call close() to get the resulting
data structure.
Note that this reader is fairly tolerant, and gladly accepts bogus
XML-RPC data without complaining (but not bogus XML).
"""
# and again, if you don't understand what's going on in here,
# that's perfectly ok.
def __init__(self, use_datetime=False, use_builtin_types=False):
self._type = None
self._stack = []
self._marks = []
self._data = []
self._methodname = None
self._encoding = "utf-8"
self.append = self._stack.append
self._use_datetime = use_builtin_types or use_datetime
self._use_bytes = use_builtin_types
def close(self):
# return response tuple and target method
if self._type is None or self._marks:
raise ResponseError()
if self._type == "fault":
raise Fault(**self._stack[0])
return tuple(self._stack)
def getmethodname(self):
return self._methodname
#
# event handlers
def xml(self, encoding, standalone):
self._encoding = encoding
# FIXME: assert standalone == 1 ???
def start(self, tag, attrs):
# prepare to handle this element
if tag == "array" or tag == "struct":
self._marks.append(len(self._stack))
self._data = []
self._value = (tag == "value")
def data(self, text):
self._data.append(text)
def end(self, tag):
# call the appropriate end tag handler
try:
f = self.dispatch[tag]
except KeyError:
pass # unknown tag ?
else:
return f(self, "".join(self._data))
#
# accelerator support
def end_dispatch(self, tag, data):
# dispatch data
try:
f = self.dispatch[tag]
except KeyError:
pass # unknown tag ?
else:
return f(self, data)
#
# element decoders
dispatch = {}
def end_nil (self, data):
self.append(None)
self._value = 0
dispatch["nil"] = end_nil
def end_boolean(self, data):
if data == "0":
self.append(False)
elif data == "1":
self.append(True)
else:
raise TypeError("bad boolean value")
self._value = 0
dispatch["boolean"] = end_boolean
def end_int(self, data):
self.append(int(data))
self._value = 0
dispatch["i4"] = end_int
dispatch["i8"] = end_int
dispatch["int"] = end_int
def end_double(self, data):
self.append(float(data))
self._value = 0
dispatch["double"] = end_double
def end_string(self, data):
if self._encoding:
data = data.decode(self._encoding)
self.append(data)
self._value = 0
dispatch["string"] = end_string
dispatch["name"] = end_string # struct keys are always strings
def end_array(self, data):
mark = self._marks.pop()
# map arrays to Python lists
self._stack[mark:] = [self._stack[mark:]]
self._value = 0
dispatch["array"] = end_array
def end_struct(self, data):
mark = self._marks.pop()
# map structs to Python dictionaries
dict = {}
items = self._stack[mark:]
for i in range(0, len(items), 2):
dict[items[i]] = items[i+1]
self._stack[mark:] = [dict]
self._value = 0
dispatch["struct"] = end_struct
def end_base64(self, data):
value = Binary()
value.decode(data.encode("ascii"))
if self._use_bytes:
value = value.data
self.append(value)
self._value = 0
dispatch["base64"] = end_base64
def end_dateTime(self, data):
value = DateTime()
value.decode(data)
if self._use_datetime:
value = _datetime_type(data)
self.append(value)
dispatch["dateTime.iso8601"] = end_dateTime
def end_value(self, data):
# if we stumble upon a value element with no internal
# elements, treat it as a string element
if self._value:
self.end_string(data)
dispatch["value"] = end_value
def end_params(self, data):
self._type = "params"
dispatch["params"] = end_params
def end_fault(self, data):
self._type = "fault"
dispatch["fault"] = end_fault
def end_methodName(self, data):
if self._encoding:
data = data.decode(self._encoding)
self._methodname = data
self._type = "methodName" # no params
dispatch["methodName"] = end_methodName
## Multicall support
#
class _MultiCallMethod:
# some lesser magic to store calls made to a MultiCall object
# for batch execution
def __init__(self, call_list, name):
self.__call_list = call_list
self.__name = name
def __getattr__(self, name):
return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name))
def __call__(self, *args):
self.__call_list.append((self.__name, args))
class MultiCallIterator:
"""Iterates over the results of a multicall. Exceptions are
raised in response to xmlrpc faults."""
def __init__(self, results):
self.results = results
def __getitem__(self, i):
item = self.results[i]
if type(item) == type({}):
raise Fault(item['faultCode'], item['faultString'])
elif type(item) == type([]):
return item[0]
else:
raise ValueError("unexpected type in multicall result")
class MultiCall:
"""server -> a object used to boxcar method calls
server should be a ServerProxy object.
Methods can be added to the MultiCall using normal
method call syntax e.g.:
multicall = MultiCall(server_proxy)
multicall.add(2,3)
multicall.get_address("Guido")
To execute the multicall, call the MultiCall object e.g.:
add_result, address = multicall()
"""
def __init__(self, server):
self.__server = server
self.__call_list = []
def __repr__(self):
return "<MultiCall at %x>" % id(self)
__str__ = __repr__
def __getattr__(self, name):
return _MultiCallMethod(self.__call_list, name)
def __call__(self):
marshalled_list = []
for name, args in self.__call_list:
marshalled_list.append({'methodName' : name, 'params' : args})
return MultiCallIterator(self.__server.system.multicall(marshalled_list))
# --------------------------------------------------------------------
# convenience functions
FastMarshaller = FastParser = FastUnmarshaller = None
##
# Create a parser object, and connect it to an unmarshalling instance.
# This function picks the fastest available XML parser.
#
# @return A (parser, unmarshaller) tuple.
def getparser(use_datetime=False, use_builtin_types=False):
"""getparser() -> parser, unmarshaller
Create an instance of the fastest available parser, and attach it
to an unmarshalling object. Return both objects.
"""
if FastParser and FastUnmarshaller:
if use_builtin_types:
mkdatetime = _datetime_type
mkbytes = base64.decodebytes
elif use_datetime:
mkdatetime = _datetime_type
mkbytes = _binary
else:
mkdatetime = _datetime
mkbytes = _binary
target = FastUnmarshaller(True, False, mkbytes, mkdatetime, Fault)
parser = FastParser(target)
else:
target = Unmarshaller(use_datetime=use_datetime, use_builtin_types=use_builtin_types)
if FastParser:
parser = FastParser(target)
else:
parser = ExpatParser(target)
return parser, target
##
# Convert a Python tuple or a Fault instance to an XML-RPC packet.
#
# @def dumps(params, **options)
# @param params A tuple or Fault instance.
# @keyparam methodname If given, create a methodCall request for
# this method name.
# @keyparam methodresponse If given, create a methodResponse packet.
# If used with a tuple, the tuple must be a singleton (that is,
# it must contain exactly one element).
# @keyparam encoding The packet encoding.
# @return A string containing marshalled data.
def dumps(params, methodname=None, methodresponse=None, encoding=None,
allow_none=False):
"""data [,options] -> marshalled data
Convert an argument tuple or a Fault instance to an XML-RPC
request (or response, if the methodresponse option is used).
In addition to the data object, the following options can be given
as keyword arguments:
methodname: the method name for a methodCall packet
methodresponse: true to create a methodResponse packet.
If this option is used with a tuple, the tuple must be
a singleton (i.e. it can contain only one element).
encoding: the packet encoding (default is UTF-8)
All byte strings in the data structure are assumed to use the
packet encoding. Unicode strings are automatically converted,
where necessary.
"""
assert isinstance(params, (tuple, Fault)), "argument must be tuple or Fault instance"
if isinstance(params, Fault):
methodresponse = 1
elif methodresponse and isinstance(params, tuple):
assert len(params) == 1, "response tuple must be a singleton"
if not encoding:
encoding = "utf-8"
if FastMarshaller:
m = FastMarshaller(encoding)
else:
m = Marshaller(encoding, allow_none)
data = m.dumps(params)
if encoding != "utf-8":
xmlheader = "<?xml version='1.0' encoding='%s'?>\n" % str(encoding)
else:
xmlheader = "<?xml version='1.0'?>\n" # utf-8 is default
# standard XML-RPC wrappings
if methodname:
# a method call
if not isinstance(methodname, str):
methodname = methodname.encode(encoding)
data = (
xmlheader,
"<methodCall>\n"
"<methodName>", methodname, "</methodName>\n",
data,
"</methodCall>\n"
)
elif methodresponse:
# a method response, or a fault structure
data = (
xmlheader,
"<methodResponse>\n",
data,
"</methodResponse>\n"
)
else:
return data # return as is
return "".join(data)
##
# Convert an XML-RPC packet to a Python object. If the XML-RPC packet
# represents a fault condition, this function raises a Fault exception.
#
# @param data An XML-RPC packet, given as an 8-bit string.
# @return A tuple containing the unpacked data, and the method name
# (None if not present).
# @see Fault
def loads(data, use_datetime=False, use_builtin_types=False):
"""data -> unmarshalled data, method name
Convert an XML-RPC packet to unmarshalled data plus a method
name (None if not present).
If the XML-RPC packet represents a fault condition, this function
raises a Fault exception.
"""
p, u = getparser(use_datetime=use_datetime, use_builtin_types=use_builtin_types)
p.feed(data)
p.close()
return u.close(), u.getmethodname()
##
# Encode a string using the gzip content encoding such as specified by the
# Content-Encoding: gzip
# in the HTTP header, as described in RFC 1952
#
# @param data the unencoded data
# @return the encoded data
def gzip_encode(data):
"""data -> gzip encoded data
Encode data using the gzip content encoding as described in RFC 1952
"""
if not gzip:
raise NotImplementedError
f = BytesIO()
gzf = gzip.GzipFile(mode="wb", fileobj=f, compresslevel=1)
gzf.write(data)
gzf.close()
encoded = f.getvalue()
f.close()
return encoded
##
# Decode a string using the gzip content encoding such as specified by the
# Content-Encoding: gzip
# in the HTTP header, as described in RFC 1952
#
# @param data The encoded data
# @return the unencoded data
# @raises ValueError if data is not correctly coded.
def gzip_decode(data):
"""gzip encoded data -> unencoded data
Decode data using the gzip content encoding as described in RFC 1952
"""
if not gzip:
raise NotImplementedError
f = BytesIO(data)
gzf = gzip.GzipFile(mode="rb", fileobj=f)
try:
decoded = gzf.read()
except OSError:
raise ValueError("invalid data")
f.close()
gzf.close()
return decoded
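# A round-trip sketch (hypothetical helper): when the optional gzip module
# is present, gzip_decode() inverts gzip_encode().
def _gzip_demo():
    if not gzip:
        return None
    raw = b"x" * 1000
    return gzip_decode(gzip_encode(raw)) == raw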
##
# Return a decoded file-like object for the gzip encoding
# as described in RFC 1952.
#
# @param response A stream supporting a read() method
# @return a file-like object that the decoded data can be read() from
class GzipDecodedResponse(gzip.GzipFile if gzip else object):
"""a file-like object to decode a response encoded with the gzip
method, as described in RFC 1952.
"""
def __init__(self, response):
#response doesn't support tell() and read(), required by
#GzipFile
if not gzip:
raise NotImplementedError
self.io = BytesIO(response.read())
gzip.GzipFile.__init__(self, mode="rb", fileobj=self.io)
def close(self):
gzip.GzipFile.close(self)
self.io.close()
# --------------------------------------------------------------------
# request dispatcher
class _Method:
# some magic to bind an XML-RPC method to an RPC server.
# supports "nested" methods (e.g. examples.getStateName)
def __init__(self, send, name):
self.__send = send
self.__name = name
def __getattr__(self, name):
return _Method(self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args):
return self.__send(self.__name, args)
##
# Standard transport class for XML-RPC over HTTP.
# <p>
# You can create custom transports by subclassing this method, and
# overriding selected methods.
class Transport:
"""Handles an HTTP transaction to an XML-RPC server."""
# client identifier (may be overridden)
user_agent = "Python-xmlrpc/%s" % __version__
#if true, we'll request gzip encoding
accept_gzip_encoding = True
# if positive, encode request using gzip if it exceeds this threshold
    # note that many servers will get confused, so only use it if you know
# that they can decode such a request
encode_threshold = None #None = don't encode
def __init__(self, use_datetime=False, use_builtin_types=False):
self._use_datetime = use_datetime
self._use_builtin_types = use_builtin_types
self._connection = (None, None)
self._extra_headers = []
##
# Send a complete request, and parse the response.
# Retry request if a cached connection has disconnected.
#
# @param host Target host.
    # @param handler Target RPC handler.
# @param request_body XML-RPC request body.
# @param verbose Debugging flag.
# @return Parsed response.
def request(self, host, handler, request_body, verbose=False):
#retry request once if cached connection has gone cold
for i in (0, 1):
try:
return self.single_request(host, handler, request_body, verbose)
except OSError as e:
if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED,
errno.EPIPE):
raise
except http.client.BadStatusLine: #close after we sent request
if i:
raise
def single_request(self, host, handler, request_body, verbose=False):
# issue XML-RPC request
try:
http_conn = self.send_request(host, handler, request_body, verbose)
resp = http_conn.getresponse()
if resp.status == 200:
self.verbose = verbose
return self.parse_response(resp)
except Fault:
raise
except Exception:
#All unexpected errors leave connection in
# a strange state, so we clear it.
self.close()
raise
#We got an error response.
#Discard any response data and raise exception
if resp.getheader("content-length", ""):
resp.read()
raise ProtocolError(
host + handler,
resp.status, resp.reason,
dict(resp.getheaders())
)
##
# Create parser.
#
    # @return A 2-tuple containing a parser and an unmarshaller.
def getparser(self):
# get parser and unmarshaller
return getparser(use_datetime=self._use_datetime,
use_builtin_types=self._use_builtin_types)
##
# Get authorization info from host parameter
# Host may be a string, or a (host, x509-dict) tuple; if a string,
# it is checked for a "user:pw@host" format, and a "Basic
# Authentication" header is added if appropriate.
#
# @param host Host descriptor (URL or (URL, x509 info) tuple).
# @return A 3-tuple containing (actual host, extra headers,
# x509 info). The header and x509 fields may be None.
def get_host_info(self, host):
x509 = {}
if isinstance(host, tuple):
host, x509 = host
auth, host = urllib.parse.splituser(host)
if auth:
auth = urllib.parse.unquote_to_bytes(auth)
auth = base64.encodebytes(auth).decode("utf-8")
auth = "".join(auth.split()) # get rid of whitespace
extra_headers = [
("Authorization", "Basic " + auth)
]
else:
extra_headers = []
return host, extra_headers, x509
##
# Connect to server.
#
# @param host Target host.
# @return An HTTPConnection object
def make_connection(self, host):
#return an existing connection if possible. This allows
#HTTP/1.1 keep-alive.
if self._connection and host == self._connection[0]:
return self._connection[1]
# create a HTTP connection object from a host descriptor
chost, self._extra_headers, x509 = self.get_host_info(host)
self._connection = host, http.client.HTTPConnection(chost)
return self._connection[1]
##
# Clear any cached connection object.
# Used in the event of socket errors.
#
def close(self):
if self._connection[1]:
self._connection[1].close()
self._connection = (None, None)
##
# Send HTTP request.
#
# @param host Host descriptor (URL or (URL, x509 info) tuple).
    # @param handler Target RPC handler (a path relative to host)
# @param request_body The XML-RPC request body
# @param debug Enable debugging if debug is true.
# @return An HTTPConnection.
def send_request(self, host, handler, request_body, debug):
connection = self.make_connection(host)
headers = self._extra_headers[:]
if debug:
connection.set_debuglevel(1)
if self.accept_gzip_encoding and gzip:
connection.putrequest("POST", handler, skip_accept_encoding=True)
headers.append(("Accept-Encoding", "gzip"))
else:
connection.putrequest("POST", handler)
headers.append(("Content-Type", "text/xml"))
headers.append(("User-Agent", self.user_agent))
self.send_headers(connection, headers)
self.send_content(connection, request_body)
return connection
##
# Send request headers.
# This function provides a useful hook for subclassing
#
# @param connection httpConnection.
# @param headers list of key,value pairs for HTTP headers
def send_headers(self, connection, headers):
for key, val in headers:
connection.putheader(key, val)
##
# Send request body.
# This function provides a useful hook for subclassing
#
# @param connection httpConnection.
# @param request_body XML-RPC request body.
def send_content(self, connection, request_body):
#optionally encode the request
if (self.encode_threshold is not None and
self.encode_threshold < len(request_body) and
gzip):
connection.putheader("Content-Encoding", "gzip")
request_body = gzip_encode(request_body)
connection.putheader("Content-Length", str(len(request_body)))
connection.endheaders(request_body)
##
# Parse response.
#
# @param file Stream.
# @return Response tuple and target method.
def parse_response(self, response):
# read response data from httpresponse, and parse it
# Check for new http response object, otherwise it is a file object.
if hasattr(response, 'getheader'):
if response.getheader("Content-Encoding", "") == "gzip":
stream = GzipDecodedResponse(response)
else:
stream = response
else:
stream = response
p, u = self.getparser()
while 1:
data = stream.read(1024)
if not data:
break
if self.verbose:
print("body:", repr(data))
p.feed(data)
if stream is not response:
stream.close()
p.close()
return u.close()
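# A minimal subclassing sketch (hypothetical): the hooks above are designed
# to be overridden selectively; everything else is inherited unchanged.
class _ExampleTransport(Transport):
    # Identify this client differently in the User-Agent header.
    user_agent = "example-client/0.1"
    def send_headers(self, connection, headers):
        # Add one extra header (hypothetical name), then defer to the base class.
        connection.putheader("X-Example", "1")
        Transport.send_headers(self, connection, headers)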
##
# Standard transport class for XML-RPC over HTTPS.
class SafeTransport(Transport):
"""Handles an HTTPS transaction to an XML-RPC server."""
# FIXME: mostly untested
def make_connection(self, host):
if self._connection and host == self._connection[0]:
return self._connection[1]
if not hasattr(http.client, "HTTPSConnection"):
raise NotImplementedError(
"your version of http.client doesn't support HTTPS")
# create a HTTPS connection object from a host descriptor
# host may be a string, or a (host, x509-dict) tuple
chost, self._extra_headers, x509 = self.get_host_info(host)
self._connection = host, http.client.HTTPSConnection(chost,
None, **(x509 or {}))
return self._connection[1]
##
# Standard server proxy. This class establishes a virtual connection
# to an XML-RPC server.
# <p>
# This class is available as ServerProxy and Server. New code should
# use ServerProxy, to avoid confusion.
#
# @def ServerProxy(uri, **options)
# @param uri The connection point on the server.
# @keyparam transport A transport factory, compatible with the
# standard transport class.
# @keyparam encoding The default encoding used for 8-bit strings
# (default is UTF-8).
# @keyparam verbose Use a true value to enable debugging output.
# (printed to standard output).
# @see Transport
class ServerProxy:
"""uri [,options] -> a logical connection to an XML-RPC server
uri is the connection point on the server, given as
scheme://host/target.
The standard implementation always supports the "http" scheme. If
    SSL socket support is available, it also supports "https".
If the target part and the slash preceding it are both omitted,
"/RPC2" is assumed.
The following options can be given as keyword arguments:
transport: a transport factory
encoding: the request encoding (default is UTF-8)
All 8-bit strings passed to the server proxy are assumed to use
the given encoding.
"""
def __init__(self, uri, transport=None, encoding=None, verbose=False,
allow_none=False, use_datetime=False, use_builtin_types=False):
# establish a "logical" server connection
# get the url
type, uri = urllib.parse.splittype(uri)
if type not in ("http", "https"):
raise OSError("unsupported XML-RPC protocol")
self.__host, self.__handler = urllib.parse.splithost(uri)
if not self.__handler:
self.__handler = "/RPC2"
if transport is None:
if type == "https":
handler = SafeTransport
else:
handler = Transport
transport = handler(use_datetime=use_datetime,
use_builtin_types=use_builtin_types)
self.__transport = transport
self.__encoding = encoding or 'utf-8'
self.__verbose = verbose
self.__allow_none = allow_none
def __close(self):
self.__transport.close()
def __request(self, methodname, params):
# call a method on the remote server
request = dumps(params, methodname, encoding=self.__encoding,
allow_none=self.__allow_none).encode(self.__encoding)
response = self.__transport.request(
self.__host,
self.__handler,
request,
verbose=self.__verbose
)
if len(response) == 1:
response = response[0]
return response
def __repr__(self):
return (
"<ServerProxy for %s%s>" %
(self.__host, self.__handler)
)
__str__ = __repr__
def __getattr__(self, name):
# magic method dispatcher
return _Method(self.__request, name)
    # note: to call a remote object with a non-standard name, use
    # result = getattr(server, "strange-python-name")(args)
def __call__(self, attr):
"""A workaround to get special attributes on the ServerProxy
without interfering with the magic __getattr__
"""
if attr == "close":
return self.__close
elif attr == "transport":
return self.__transport
raise AttributeError("Attribute %r not found" % (attr,))
# compatibility
Server = ServerProxy
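##
# Illustrative sketch (not part of the original module): __getattr__
# above turns every attribute into a remote method call, so the local
# special attributes are reached through __call__ instead, e.g.:
#
#     proxy = ServerProxy("http://localhost:8000")  # hypothetical URL
#     proxy("transport")   # -> the Transport instance in use
#     proxy("close")()     # close the underlying HTTP connection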
# --------------------------------------------------------------------
# test code
if __name__ == "__main__":
# simple test program (from the XML-RPC specification)
# local server, available from Lib/xmlrpc/server.py
server = ServerProxy("http://localhost:8000")
try:
print(server.currentTime.getCurrentTime())
except Error as v:
print("ERROR", v)
multi = MultiCall(server)
multi.getData()
multi.pow(2,9)
multi.add(1,2)
try:
for response in multi():
print(response)
except Error as v:
print("ERROR", v)
|
Flimm/linkchecker
|
refs/heads/master
|
linkcheck/gui/linkchecker_ui_options.py
|
8
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/options.ui'
#
# Created: Fri Feb 28 21:24:59 2014
# by: PyQt4 UI code generator 4.9.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Options(object):
def setupUi(self, Options):
Options.setObjectName(_fromUtf8("Options"))
Options.resize(455, 550)
Options.setMinimumSize(QtCore.QSize(400, 550))
self.verticalLayout_3 = QtGui.QVBoxLayout(Options)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.groupBox_2 = QtGui.QGroupBox(Options)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.verticalLayout = QtGui.QVBoxLayout(self.groupBox_2)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.widget = QtGui.QWidget(self.groupBox_2)
self.widget.setObjectName(_fromUtf8("widget"))
self.formLayout = QtGui.QFormLayout(self.widget)
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.ExpandingFieldsGrow)
self.formLayout.setMargin(0)
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.label = QtGui.QLabel(self.widget)
self.label.setObjectName(_fromUtf8("label"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label)
self.recursionlevel = QtGui.QSpinBox(self.widget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.recursionlevel.sizePolicy().hasHeightForWidth())
self.recursionlevel.setSizePolicy(sizePolicy)
self.recursionlevel.setMinimumSize(QtCore.QSize(0, 25))
self.recursionlevel.setMinimum(-1)
self.recursionlevel.setMaximum(100)
self.recursionlevel.setProperty("value", -1)
self.recursionlevel.setObjectName(_fromUtf8("recursionlevel"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.recursionlevel)
self.label_2 = QtGui.QLabel(self.widget)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_2)
self.verbose = QtGui.QCheckBox(self.widget)
self.verbose.setEnabled(True)
self.verbose.setText(_fromUtf8(""))
self.verbose.setObjectName(_fromUtf8("verbose"))
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.verbose)
self.label_4 = QtGui.QLabel(self.widget)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_4)
self.debug = QtGui.QCheckBox(self.widget)
self.debug.setText(_fromUtf8(""))
self.debug.setObjectName(_fromUtf8("debug"))
self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.debug)
self.verticalLayout.addWidget(self.widget)
spacerItem = QtGui.QSpacerItem(20, 10, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.label_5 = QtGui.QLabel(self.groupBox_2)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.verticalLayout.addWidget(self.label_5)
self.warninglines = QtGui.QPlainTextEdit(self.groupBox_2)
self.warninglines.setMaximumSize(QtCore.QSize(16777215, 150))
self.warninglines.setObjectName(_fromUtf8("warninglines"))
self.verticalLayout.addWidget(self.warninglines)
self.label_6 = QtGui.QLabel(self.groupBox_2)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.verticalLayout.addWidget(self.label_6)
self.ignorelines = QtGui.QPlainTextEdit(self.groupBox_2)
self.ignorelines.setMaximumSize(QtCore.QSize(16777215, 150))
self.ignorelines.setObjectName(_fromUtf8("ignorelines"))
self.verticalLayout.addWidget(self.ignorelines)
self.verticalLayout_3.addWidget(self.groupBox_2)
self.groupBox = QtGui.QGroupBox(Options)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.label_3 = QtGui.QLabel(self.groupBox)
self.label_3.setWordWrap(True)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.verticalLayout_2.addWidget(self.label_3)
spacerItem1 = QtGui.QSpacerItem(20, 10, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem1)
self.user_config_filename = QtGui.QLabel(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.user_config_filename.sizePolicy().hasHeightForWidth())
self.user_config_filename.setSizePolicy(sizePolicy)
self.user_config_filename.setFrameShape(QtGui.QFrame.NoFrame)
self.user_config_filename.setLineWidth(0)
self.user_config_filename.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.user_config_filename.setWordWrap(True)
self.user_config_filename.setMargin(0)
self.user_config_filename.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse)
self.user_config_filename.setObjectName(_fromUtf8("user_config_filename"))
self.verticalLayout_2.addWidget(self.user_config_filename)
self.user_config_button = QtGui.QPushButton(self.groupBox)
self.user_config_button.setEnabled(False)
self.user_config_button.setToolTip(_fromUtf8(""))
self.user_config_button.setObjectName(_fromUtf8("user_config_button"))
self.verticalLayout_2.addWidget(self.user_config_button)
self.verticalLayout_3.addWidget(self.groupBox)
spacerItem2 = QtGui.QSpacerItem(20, 10, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_3.addItem(spacerItem2)
self.widget_2 = QtGui.QWidget(Options)
self.widget_2.setObjectName(_fromUtf8("widget_2"))
self.horizontalLayout = QtGui.QHBoxLayout(self.widget_2)
self.horizontalLayout.setMargin(0)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.closeButton = QtGui.QPushButton(self.widget_2)
self.closeButton.setObjectName(_fromUtf8("closeButton"))
self.horizontalLayout.addWidget(self.closeButton)
spacerItem3 = QtGui.QSpacerItem(317, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem3)
self.verticalLayout_3.addWidget(self.widget_2)
self.retranslateUi(Options)
QtCore.QMetaObject.connectSlotsByName(Options)
def retranslateUi(self, Options):
Options.setWindowTitle(_("Options"))
self.groupBox_2.setTitle(_("Checking options"))
self.label.setToolTip(_("Check recursively all links up to given depth. A negative depth will enable infinite recursion."))
self.label.setText(_("Recursive depth"))
self.recursionlevel.setToolTip(_("Check recursively all links up to given depth. A negative depth will enable infinite recursion."))
self.label_2.setToolTip(_("Log all checked URLs once. Default is to log only errors and warnings."))
self.label_2.setText(_("Verbose output"))
self.verbose.setToolTip(_("Log all checked URLs once. Default is to log only errors and warnings."))
self.label_4.setText(_("Debug"))
        self.label_5.setText(_("Warn when one of these strings is found (one per line):"))
self.label_6.setText(_("Ignore URLs matching one of these patterns (one per line):"))
self.groupBox.setTitle(_("Configuration file"))
self.label_3.setText(_("The configuration file holds advanced options and can be edited with an integrated text editor."))
self.user_config_filename.setToolTip(_("Overrides system wide configuration file settings."))
self.user_config_filename.setText(_("/home/user/.linkchecker/linkcheckerrc"))
self.user_config_button.setText(_("Edit"))
self.closeButton.setText(_("Close"))
|
jahrome/viper
|
refs/heads/master
|
viper/common/abstracts.py
|
3
|
# This file is part of Viper - https://github.com/viper-framework/viper
# See the file 'LICENSE' for copying permission.
import argparse
class ArgumentErrorCallback(Exception):
def __init__(self, message, level=''):
self.message = message.strip() + '\n'
self.level = level.strip()
def __str__(self):
return '{}: {}'.format(self.level, self.message)
def get(self):
return self.level, self.message
class ArgumentParser(argparse.ArgumentParser):
def print_usage(self):
raise ArgumentErrorCallback(self.format_usage())
def print_help(self):
raise ArgumentErrorCallback(self.format_help())
def error(self, message):
raise ArgumentErrorCallback(message, 'error')
    def exit(self, status=0, message=None):
if message is not None:
raise ArgumentErrorCallback(message)
class Module(object):
cmd = ''
description = ''
command_line = []
args = None
authors = []
output = []
    def __init__(self):
        self.parser = ArgumentParser(prog=self.cmd, description=self.description)
        # Keep mutable state per instance; the class-level defaults
        # above would otherwise be shared between all module instances.
        self.args = None
        self.output = []
def set_commandline(self, command):
self.command_line = command
def log(self, event_type, event_data):
self.output.append(dict(
type=event_type,
data=event_data
))
def usage(self):
self.log('', self.parser.format_usage())
def help(self):
self.log('', self.parser.format_help())
def run(self):
try:
self.args = self.parser.parse_args(self.command_line)
except ArgumentErrorCallback as e:
self.log(*e.get())
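# Illustrative sketch (not part of the original file): a minimal
# concrete module showing the plugin contract above.  The command name
# and argument below are hypothetical.
class EchoModule(Module):
    cmd = 'echo'
    description = 'Log back the given text'
    authors = ['example']

    def __init__(self):
        super(EchoModule, self).__init__()
        self.parser.add_argument('text', help='text to log')

    def run(self):
        # Module.run() parses self.command_line; argparse failures are
        # logged through ArgumentErrorCallback instead of exiting.
        super(EchoModule, self).run()
        if self.args is not None:
            self.log('info', self.args.text)

# Hypothetical usage:
#     m = EchoModule()
#     m.set_commandline(['hello'])
#     m.run()  # m.output -> [{'type': 'info', 'data': 'hello'}]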
|
utarsuno/urbtek
|
refs/heads/dev
|
universal_code/excel/excel_file_generator.py
|
1
|
# coding=utf-8
"""
This module, excel_file_generator.py, will be used to generate Excel files.
"""
# This is for creating a new file, I just want to modify an existing template excel file though.
'''
import xlsxwriter
workbook = xlsxwriter.Workbook('hello_world.xlsx')
hours_worksheet = workbook.add_worksheet('Hours')
hours_worksheet.write('A1', 'HELLO HELLO?!')
workbook.close()
'''
import openpyxl
# Detect the platform (the Ubuntu/Linux path differs from the macOS one).
from sys import platform as _platform
# Needed to get today's date and do other date logic.
import datetime
if _platform == "linux" or _platform == "linux2":
import sys
# sys.path.remove('/home/dev_usr/urbtek/nexus_django')
sys.path.append('/home/dev_usr/urbtek')
file_path = '/dev/urbtek/universal_code/excel/base_excel_hours_logging.xlsx'
else:
file_path = '/Users/utarsuno/git_repos/urbtek/urbtek/universal_code/excel/base_excel_hours_logging.xlsx'
# The date format.
FORMAT = '%m/%d/%Y'
today = datetime.datetime.today()
day_of_week = today.weekday()
to_end_of_week = datetime.timedelta(days = 6 - day_of_week)
end_of_week = today + to_end_of_week
to_beginning_of_week = datetime.timedelta(days = day_of_week)
beginning_of_week = today - to_beginning_of_week
monday = beginning_of_week
tuesday = beginning_of_week + datetime.timedelta(days=1)
wednesday = beginning_of_week + datetime.timedelta(days=2)
thursday = beginning_of_week + datetime.timedelta(days=3)
friday = beginning_of_week + datetime.timedelta(days=4)
saturday = beginning_of_week + datetime.timedelta(days=5)
sunday = beginning_of_week + datetime.timedelta(days=6)
submit_day = beginning_of_week + datetime.timedelta(days=7)
# The cells to work with.
date_submitted = 'K1'
monday_cells = ['E4', 'E199', 'E207', monday.strftime(FORMAT)]
tuesday_cells = ['F4', 'F199', 'F207', tuesday.strftime(FORMAT)]
wednesday_cells = ['G4', 'G199', 'G207', wednesday.strftime(FORMAT)]
thursday_cells = ['H4', 'H199', 'H207', thursday.strftime(FORMAT)]
friday_cells = ['I4', 'I199', 'I207', friday.strftime(FORMAT)]
saturday_cells = ['J4', 'J199', 'J207', saturday.strftime(FORMAT)]
sunday_cells = ['K4', 'K199', 'K207', sunday.strftime(FORMAT)]
hour_cells = [monday_cells, tuesday_cells, wednesday_cells, thursday_cells, friday_cells, saturday_cells, sunday_cells]
# Load the Excel file here.
xfile = openpyxl.load_workbook(file_path)
sheet = xfile['Sheet1']
sheet[date_submitted] = submit_day.strftime(FORMAT)
for c in hour_cells:
    # Write this day's date into its header cell; the remaining two
    # entries are currently written with their own cell addresses,
    # which looks like placeholder output for the hour totals.
    sheet[c[0]] = c[3]
    sheet[c[1]] = c[1]
    sheet[c[2]] = c[2]
xfile.save('HELLO_TEST_YES.xlsx')
'''
today = datetime.datetime.today()
tomorrow = today + datetime.timedelta(days=1)
print(today.strftime('%m/%d/%Y'))
print(tomorrow.strftime('%m/%d/%Y'))
'''
|
fafaschiavo/lol-api-webapp
|
refs/heads/master
|
lolapiwebapp/stock/apps.py
|
5
|
from __future__ import unicode_literals
from django.apps import AppConfig
class StockConfig(AppConfig):
name = 'stock'
|
epfl-mobots/epuck2-aseba
|
refs/heads/master
|
maintainer/updatedoc/wikidot/tree.py
|
9
|
# Aseba - an event-based framework for distributed robot control
# Copyright (C) 2007--2011:
# Stephane Magnenat <stephane at magnenat dot net>
# (http://stephane.magnenat.net)
# and other contributors, see authors.txt for details
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function

import sys
class WikiNode:
"""Build a tree, mirroring the structure of a wikidot-based wiki"""
def __init__(self, title, link):
"""x.__init__(title, link) creates a new node x.
This node corresponds to a page with the title set to 'title'
and whose link is 'link'."""
self.title = title
self.link = link
self.children = list()
def __repr__(self):
"""x.__repr__() <==> repr(x)"""
return "{} ({}) - {} children".format(self.title, self.link, len(self.children))
def __getitem__(self, y):
"""x.__getitem__(y) <==> x[y]
Return the y-th child of the node"""
return self.children[y]
def insert(self, title, link, breadcrumbs):
"""Insert a new node into the tree.
Inputs:
title: The page's title
link: The page's URL (can be just the name, or full URL, or a partial path)
breadcrumbs: list listing the URL of the parents, starting from the root
The URLs should be coherent between all inputs.
Output:
Newly inserted node. 'None' if no corresponding parents.
"""
if breadcrumbs == []:
# it is a leaf, insert it
child = WikiNode(title, link)
self.children.append(child)
return child
else:
# search a corresponding child
for x in self.children:
if x.link == breadcrumbs[0]:
# match
return x.insert(title, link, breadcrumbs[1:])
# failure
return None
def dump(self, level=0):
"""Recursively dump to stderr the whole tree"""
        print(level * ' ' + str(level) + " {} - {}".format(self.title, self.link), file=sys.stderr)
level += 1
for x in self.children:
x.dump(level)
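# Illustrative sketch (not part of the original file): building a small
# tree with insert() and breadcrumbs.  The page names are hypothetical.
#
#     root = WikiNode("Home", "start")
#     root.insert("Docs", "doc", [])          # direct child of the root
#     root.insert("API", "doc:api", ["doc"])  # child of the "doc" node
#     root.dump()                             # print the tree to stderr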
|
tensorflow/datasets
|
refs/heads/master
|
tensorflow_datasets/core/features/video_feature_test.py
|
1
|
# coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tensorflow_datasets.core.features.video_feature."""
import json
import os
import pathlib
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_datasets import testing
from tensorflow_datasets.core import features
tf.enable_v2_behavior()
class VideoFeatureTest(testing.FeatureExpectationsTestCase):
@property
def _test_data_path(self):
return os.path.join(os.path.dirname(__file__), '../../testing/test_data')
def test_video_numpy(self):
np_video = np.random.randint(256, size=(128, 64, 64, 3), dtype=np.uint8)
self.assertFeature(
feature=features.Video(shape=(None, 64, 64, 3)),
shape=(None, 64, 64, 3),
dtype=tf.uint8,
tests=[
testing.FeatureExpectationItem(
value=np_video,
expected=np_video,
),
],
test_attributes=dict(_encoding_format='png', _extra_ffmpeg_args=[]))
def test_video_concatenated_frames(self):
video_shape = (None, 400, 640, 3)
lsun_examples_path = os.path.join(self._test_data_path, 'lsun_examples')
frames_paths = [
os.path.join(lsun_examples_path, '{}.jpg'.format(i))
for i in (1, 2, 3, 4)
]
frames = []
for frame_path in frames_paths:
with tf.io.gfile.GFile(frame_path, 'rb') as frame_fp:
frames.append(tf.image.decode_jpeg(frame_fp.read(), channels=3))
video = tf.stack(frames)
# Check both str and Path objects
frames_paths[-1] = pathlib.Path(frames_paths[-1])
self.assertFeature(
feature=features.Video(shape=video_shape),
shape=video_shape,
dtype=tf.uint8,
tests=[
# Numpy array
testing.FeatureExpectationItem(
value=frames_paths,
expected=video,
),
],
)
def test_video_ffmpeg(self):
video_path = os.path.join(self._test_data_path, 'video.mkv')
video_json_path = os.path.join(self._test_data_path, 'video.json')
with tf.io.gfile.GFile(video_json_path) as fp:
video_array = np.asarray(json.load(fp))
self.assertFeature(
feature=features.Video(shape=(5, 4, 2, 3)),
shape=(5, 4, 2, 3),
dtype=tf.uint8,
tests=[
testing.FeatureExpectationItem(
value=video_path,
expected=video_array,
),
testing.FeatureExpectationItem(
value=pathlib.Path(video_path),
expected=video_array,
),
],
)
class GFileWithSeekOnRead(tf.io.gfile.GFile):
def read(self, *args, **kwargs):
data_read = super(GFileWithSeekOnRead, self).read(*args, **kwargs)
self.seek(0)
return data_read
with GFileWithSeekOnRead(video_path, 'rb') as video_fp:
self.assertFeature(
feature=features.Video(shape=(5, 4, 2, 3)),
shape=(5, 4, 2, 3),
dtype=tf.uint8,
tests=[
testing.FeatureExpectationItem(
value=video_fp,
expected=video_array,
),
],
)
if __name__ == '__main__':
testing.test_main()
|
AlmostBetterNetwork/pinecast
|
refs/heads/master
|
podcasts/migrations/0024_auto_20161127_1935.py
|
3
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-27 19:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0023_auto_20161026_0131'),
]
operations = [
migrations.AlterField(
model_name='podcast',
name='author_name',
field=models.CharField(default='Anonymous', max_length=1024),
),
migrations.AlterField(
model_name='podcast',
name='language',
field=models.CharField(default='en-US', max_length=16),
),
migrations.AlterField(
model_name='podcast',
name='subtitle',
field=models.CharField(blank=True, default='', max_length=512),
),
migrations.AlterField(
model_name='podcastcategory',
name='category',
field=models.CharField(choices=[('Music/Easy Listening', 'Music/Easy Listening'), ('Music/Oldies', 'Music/Oldies'), ('Games & Hobbies', 'Games & Hobbies'), ('Government & Organizations/National', 'Government & Organizations/National'), ('TV & Film', 'TV & Film'), ('Music/Hip-Hop & Rap', 'Music/Hip-Hop & Rap'), ('Religion & Spirituality/Spirituality', 'Religion & Spirituality/Spirituality'), ('Music', 'Music'), ('Music/Electronic', 'Music/Electronic'), ('Arts/Performing Arts', 'Arts/Performing Arts'), ('Society & Culture/Personal Journals', 'Society & Culture/Personal Journals'), ('Government & Organizations/Local', 'Government & Organizations/Local'), ('Music/Electronic/Garage', 'Music/Electronic/Garage'), ('Music/Electronic/Breakbeat', 'Music/Electronic/Breakbeat'), ('Music/Electronic/Big Beat', 'Music/Electronic/Big Beat'), ('Comedy', 'Comedy'), ('Education/K-12', 'Education/K-12'), ('Sports & Recreation/College & High School', 'Sports & Recreation/College & High School'), ('Music/R&B & Urban', 'Music/R&B & Urban'), ('Music/Inspirational', 'Music/Inspirational'), ('Music/Soundtracks', 'Music/Soundtracks'), ('Music/Electronic/Downtempo', 'Music/Electronic/Downtempo'), ('Arts', 'Arts'), ('Music/Electronic/Jungle', 'Music/Electronic/Jungle'), ('Games & Hobbies/Other Games', 'Games & Hobbies/Other Games'), ('Technology/IT News', 'Technology/IT News'), ('Music/Electronic/Tribal', 'Music/Electronic/Tribal'), ('Business', 'Business'), ('Business/Investing', 'Business/Investing'), ('Education/Educational Technology', 'Education/Educational Technology'), ('Government & Organizations', 'Government & Organizations'), ('Music/Rock', 'Music/Rock'), ('Health/Self-Help', 'Health/Self-Help'), ('Technology/Podcasting', 'Technology/Podcasting'), ('Society & Culture/Places & Travel', 'Society & Culture/Places & Travel'), ('Games & Hobbies/Aviation', 'Games & Hobbies/Aviation'), ('Music/Electronic/Trip Hop', 'Music/Electronic/Trip Hop'), ('Games & Hobbies/Video Games', 'Games & Hobbies/Video Games'), ('Health/Kids & Family', 'Health/Kids & Family'), ('Technology', 'Technology'), ('Music/Blues', 'Music/Blues'), ('Music/Alternative', 'Music/Alternative'), ('Religion & Spirituality/Judaism', 'Religion & Spirituality/Judaism'), ('Religion & Spirituality', 'Religion & Spirituality'), ('Society & Culture', 'Society & Culture'), ('Music/Electronic/Disco', 'Music/Electronic/Disco'), ('Music/World', 'Music/World'), ('Education/Training', 'Education/Training'), ('Arts/Visual Arts', 'Arts/Visual Arts'), ('Music/Seasonal & Holiday', 'Music/Seasonal & Holiday'), ('Business/Shopping', 'Business/Shopping'), ('Health', 'Health'), ('Education', 'Education'), ('Music/Folk', 'Music/Folk'), ('Business/Management & Marketing', 'Business/Management & Marketing'), ('News & Politics/Conservative (Right)', 'News & Politics/Conservative (Right)'), ('News & Politics/Liberal (Left)', 'News & Politics/Liberal (Left)'), ('Arts/Design', 'Arts/Design'), ('Science & Medicine/Medicine', 'Science & Medicine/Medicine'), ('Arts/Literature', 'Arts/Literature'), ('Sports & Recreation/Amateur', 'Sports & Recreation/Amateur'), ('Society & Culture/Gay & Lesbian', 'Society & Culture/Gay & Lesbian'), ('Religion & Spirituality/Other', 'Religion & Spirituality/Other'), ('Religion & Spirituality/Hinduism', 'Religion & Spirituality/Hinduism'), ('Government & Organizations/Regional', 'Government & Organizations/Regional'), ('Arts/Food', 'Arts/Food'), ('Health/Sexuality', 'Health/Sexuality'), ('Health/Alternative Health', 'Health/Alternative Health'), 
('Society & Culture/History', 'Society & Culture/History'), ('Technology/Software How-To', 'Technology/Software How-To'), ('Science & Medicine', 'Science & Medicine'), ('Music/Pop', 'Music/Pop'), ('Sports & Recreation', 'Sports & Recreation'), ('Government & Organizations/Non-Profit', 'Government & Organizations/Non-Profit'), ('Music/Electronic/Techno', 'Music/Electronic/Techno'), ('Music/New Age', 'Music/New Age'), ('Music/Electronic/House', 'Music/Electronic/House'), ('Games & Hobbies/Hobbies', 'Games & Hobbies/Hobbies'), ('Education/Higher Education', 'Education/Higher Education'), ('Music/Electronic/Trance', 'Music/Electronic/Trance'), ('Music/Country', 'Music/Country'), ('Science & Medicine/Social Sciences', 'Science & Medicine/Social Sciences'), ('Business/Careers', 'Business/Careers'), ('Music/Electronic/Ambient', 'Music/Electronic/Ambient'), ('Religion & Spirituality/Buddhism', 'Religion & Spirituality/Buddhism'), ('Games & Hobbies/Automotive', 'Games & Hobbies/Automotive'), ('Science & Medicine/Natural Sciences', 'Science & Medicine/Natural Sciences'), ('Religion & Spirituality/Islam', 'Religion & Spirituality/Islam'), ('Music/Electronic/IDM', 'Music/Electronic/IDM'), ('Music/Latin', 'Music/Latin'), ('Technology/Gadgets', 'Technology/Gadgets'), ('Society & Culture/Philosophy', 'Society & Culture/Philosophy'), ('Arts/Fashion & Beauty', 'Arts/Fashion & Beauty'), ('Sports & Recreation/Outdoor', 'Sports & Recreation/Outdoor'), ('Business/Business News', 'Business/Business News'), ('Health/Fitness & Nutrition', 'Health/Fitness & Nutrition'), ('Music/Electronic/Hard House', 'Music/Electronic/Hard House'), ('Music/Metal', 'Music/Metal'), ('Arts/Spoken Word', 'Arts/Spoken Word'), ('Music/Freeform', 'Music/Freeform'), ('Religion & Spirituality/Christianity', 'Religion & Spirituality/Christianity'), ('Music/Reggae', 'Music/Reggae'), ('Music/Electronic/Progressive', 'Music/Electronic/Progressive'), ('Music/Electronic/Acid House', 'Music/Electronic/Acid House'), ('News & Politics', 'News & Politics'), ('Education/Language Courses', 'Education/Language Courses'), ("Music/Electronic/Drum 'n' Bass", "Music/Electronic/Drum 'n' Bass"), ('Sports & Recreation/Professional', 'Sports & Recreation/Professional'), ('Music/Jazz', 'Music/Jazz')], max_length=128),
),
migrations.AlterField(
model_name='podcastepisode',
name='description',
field=models.TextField(default=''),
),
migrations.AlterField(
model_name='podcastepisode',
name='duration',
field=models.PositiveIntegerField(help_text='Audio duration in seconds'),
),
migrations.AlterField(
model_name='podcastepisode',
name='explicit_override',
field=models.CharField(choices=[('none', 'None'), ('expl', 'Explicit'), ('clen', 'Clean')], default='none', max_length=4),
),
migrations.AlterField(
model_name='podcastepisode',
name='subtitle',
field=models.CharField(blank=True, default='', max_length=1024),
),
]
|
artefactual/archivematica-history
|
refs/heads/master
|
src/archivematicaCommon/lib/externals/pyes/contrib/mappings.py
|
5
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyes.es import ES
from pyes import mappings
def mappings_to_code(obj, doc_count=0):
result = []
odict = obj.as_dict()
if isinstance(obj, (mappings.DocumentObjectField, mappings.ObjectField, mappings.NestedObject)):
properties = odict.pop("properties", [])
doc_count += 1
kwargs = ["name=%r" % obj.name,
"type=%r" % odict.pop("type")] +\
["%s=%r" % (k, odict[k]) for k in sorted(odict.keys())]
result.append(
"doc%d=" % doc_count + str(type(obj)).split(".")[-1].strip("'>") + "(" + ', '.join(kwargs) + ")")
for k in sorted(obj.properties.keys()):
result.extend(mappings_to_code(obj.properties[k], doc_count))
else:
kwargs = ["name=%r" % obj.name,
"type=%r" % odict.pop("type"),
"store=%r" % obj.store,
"index=%r" % odict.pop("index")] +\
["%s=%r" % (k, odict[k]) for k in sorted(odict.keys())]
result.append("doc%d.add_property(" % doc_count +\
str(type(obj)).split(".")[-1].strip("'>") + "(" +\
', '.join(kwargs) + "))")
return result
if __name__ == '__main__':
es = ES("192.168.1.1:9200")
res = mappings_to_code(es.mappings.get_doctype("twitter", "twitter"))
    print("\n".join(res))
|
runekaagaard/django-contrib-locking
|
refs/heads/master
|
tests/validation/models.py
|
139
|
from __future__ import unicode_literals
from datetime import datetime
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
def validate_answer_to_universe(value):
if value != 42:
raise ValidationError('This is not the answer to life, universe and everything!', code='not42')
class ModelToValidate(models.Model):
name = models.CharField(max_length=100)
created = models.DateTimeField(default=datetime.now)
number = models.IntegerField(db_column='number_val')
parent = models.ForeignKey('self', blank=True, null=True, limit_choices_to={'number': 10})
email = models.EmailField(blank=True)
ufm = models.ForeignKey('UniqueFieldsModel', to_field='unique_charfield', blank=True, null=True)
url = models.URLField(blank=True)
f_with_custom_validator = models.IntegerField(blank=True, null=True, validators=[validate_answer_to_universe])
slug = models.SlugField(blank=True)
def clean(self):
super(ModelToValidate, self).clean()
if self.number == 11:
raise ValidationError('Invalid number supplied!')
class UniqueFieldsModel(models.Model):
unique_charfield = models.CharField(max_length=100, unique=True)
unique_integerfield = models.IntegerField(unique=True)
non_unique_field = models.IntegerField()
class CustomPKModel(models.Model):
my_pk_field = models.CharField(max_length=100, primary_key=True)
class UniqueTogetherModel(models.Model):
cfield = models.CharField(max_length=100)
ifield = models.IntegerField()
efield = models.EmailField()
class Meta:
unique_together = (('ifield', 'cfield',), ['ifield', 'efield'])
class UniqueForDateModel(models.Model):
start_date = models.DateField()
end_date = models.DateTimeField()
count = models.IntegerField(unique_for_date="start_date", unique_for_year="end_date")
order = models.IntegerField(unique_for_month="end_date")
name = models.CharField(max_length=100)
class CustomMessagesModel(models.Model):
other = models.IntegerField(blank=True, null=True)
number = models.IntegerField(db_column='number_val',
error_messages={'null': 'NULL', 'not42': 'AAARGH', 'not_equal': '%s != me'},
validators=[validate_answer_to_universe]
)
class Author(models.Model):
name = models.CharField(max_length=100)
class Article(models.Model):
title = models.CharField(max_length=100)
author = models.ForeignKey(Author)
pub_date = models.DateTimeField(blank=True)
def clean(self):
if self.pub_date is None:
self.pub_date = datetime.now()
@python_2_unicode_compatible
class Post(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField()
def __str__(self):
        return self.title
class FlexibleDatePost(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField(blank=True, null=True)
class UniqueErrorsModel(models.Model):
name = models.CharField(max_length=100, unique=True, error_messages={'unique': 'Custom unique name message.'})
no = models.IntegerField(unique=True, error_messages={'unique': 'Custom unique number message.'})
class GenericIPAddressTestModel(models.Model):
generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True)
v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4")
v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6")
ip_verbose_name = models.GenericIPAddressField("IP Address Verbose",
blank=True, null=True)
class GenericIPAddrUnpackUniqueTest(models.Model):
generic_v4unpack_ip = models.GenericIPAddressField(null=True, blank=True, unique=True, unpack_ipv4=True)
# A model can't have multiple AutoFields
# Refs #12467.
assertion_error = None
try:
class MultipleAutoFields(models.Model):
auto1 = models.AutoField(primary_key=True)
auto2 = models.AutoField(primary_key=True)
except AssertionError as exc:
assertion_error = exc
assert str(assertion_error) == "A model can't have more than one AutoField."
|
hydroshare/hydroshare_temp
|
refs/heads/master
|
hs_party/forms/__init__.py
|
4
|
__author__ = 'valentin'
|
axelbellec/chronos
|
refs/heads/master
|
chronos/__init__.py
|
1
|
# coding: utf-8
__project__ = 'chronos'
__author__ = 'Axel Bellec'
__licence__ = 'MIT'
__version__ = '0.0.2'
|