hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4701b69ece1ffb86a010433c4dab85c899b9d3ee | 951 | py | Python | App/migrations/0001_initial.py | GnomGad/BackendDjangoTodoServer | 0aae8e5dc751b8914ee81e280a248a4154d8d5c0 | [
"MIT"
] | null | null | null | App/migrations/0001_initial.py | GnomGad/BackendDjangoTodoServer | 0aae8e5dc751b8914ee81e280a248a4154d8d5c0 | [
"MIT"
] | null | null | null | App/migrations/0001_initial.py | GnomGad/BackendDjangoTodoServer | 0aae8e5dc751b8914ee81e280a248a4154d8d5c0 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.6 on 2020-07-10 22:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the App: creates the Person and Todo tables."""

    # First migration of this app; no previous migration state exists.
    initial = True

    # No migrations from other apps are required before this one.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Person',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Application-level person identifier, separate from the auto 'id' PK.
                ('idPerson', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='Todo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.CharField(max_length=350)),
                ('ittodo', models.IntegerField()),
                # Deleting a Person cascades and removes its Todo rows.
                ('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='App.Person')),
            ],
        ),
    ]
| 29.71875 | 114 | 0.566772 | 825 | 0.867508 | 0 | 0 | 0 | 0 | 0 | 0 | 121 | 0.127234 |
470257d84016a4e45db78acf75fde0f745cbddab | 1,320 | py | Python | tinybot/user.py | codymcelroy/Tinyman-Bot | 6501eb4e8c9ac5e551e064d9d03bef61dbd54705 | [
"MIT"
] | null | null | null | tinybot/user.py | codymcelroy/Tinyman-Bot | 6501eb4e8c9ac5e551e064d9d03bef61dbd54705 | [
"MIT"
] | null | null | null | tinybot/user.py | codymcelroy/Tinyman-Bot | 6501eb4e8c9ac5e551e064d9d03bef61dbd54705 | [
"MIT"
] | null | null | null | from algosdk import mnemonic
from tinyman.v1.client import TinymanMainnetClient
import os
from dotenv import load_dotenv
#Creating Tinyman client object
class User:
    """Derive Algorand credentials and a Tinyman client from a mnemonic.

    Every helper accepts either the name of an environment variable that
    holds a mnemonic phrase, or the mnemonic phrase itself.
    """

    def __init__(self) -> None:
        pass

    @staticmethod
    def load_env_key(environment_key):
        """Return the value of *environment_key* from the environment/.env file.

        Returns None when the variable is not defined.
        """
        load_dotenv()
        env_key = os.getenv(environment_key)
        return env_key

    @staticmethod
    def private_key(mnemonic_phrase):
        """Derive the account private key from an env-var name or a phrase."""
        try:
            # First assume the argument names an environment variable.
            key = User.load_env_key(mnemonic_phrase)
            private_key = mnemonic.to_private_key(key)
        except Exception:
            # Fallback: treat the argument as the mnemonic phrase itself.
            # (Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed.)
            private_key = mnemonic.to_private_key(mnemonic_phrase)
        return private_key

    @staticmethod
    def address(mnemonic_phrase):
        """Derive the public Algorand address from an env-var name or a phrase."""
        try:
            key = User.load_env_key(mnemonic_phrase)
            public_address = mnemonic.to_public_key(key)
        except Exception:
            public_address = mnemonic.to_public_key(mnemonic_phrase)
        return public_address

    @staticmethod
    def tiny_client(mnemonic_phrase):
        """Return a TinymanMainnetClient bound to the derived public address."""
        public_address = User.address(mnemonic_phrase)
        client = TinymanMainnetClient(user_address=public_address)
        return client

    @staticmethod
    def main():
        """Interactive entry point: prompt for a key/phrase and build a client."""
        key = input("Enter Environment Key or Mnemonic Phrase: ")
        client = User.tiny_client(key)
        print(f'Tinyman client object created for {client.user_address}')


if __name__ == "__main__":
    User.main()
| 27.5 | 73 | 0.668939 | 1,112 | 0.842424 | 0 | 0 | 0 | 0 | 0 | 0 | 144 | 0.109091 |
47037400ff4e76fc857f6ee41c232c1a45585226 | 148 | py | Python | graphgallery/nn/models/pytorch/autoencoder/__init__.py | TobiasSchmidtDE/GraphGallery | e627e4f454e0ce3813171305a524f5190a6e6f45 | [
"MIT"
] | null | null | null | graphgallery/nn/models/pytorch/autoencoder/__init__.py | TobiasSchmidtDE/GraphGallery | e627e4f454e0ce3813171305a524f5190a6e6f45 | [
"MIT"
] | null | null | null | graphgallery/nn/models/pytorch/autoencoder/__init__.py | TobiasSchmidtDE/GraphGallery | e627e4f454e0ce3813171305a524f5190a6e6f45 | [
"MIT"
] | null | null | null | from .decoder import InnerProductDecoder
from .autoencoder import AutoEncoder
from .loss import BCELoss
from .gae import GAE
from .vgae import VGAE
| 24.666667 | 40 | 0.831081 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
470412bcb9b63709268258bfb24826c99f45c360 | 2,009 | py | Python | ad_api/api/sb/bid_recommendations.py | mkdir700/python-amazon-ad-api | e82429be4c56f4b56bddfcd70c18dabd4c109406 | [
"MIT"
] | 12 | 2021-11-06T11:12:12.000Z | 2022-03-31T19:10:08.000Z | ad_api/api/sb/bid_recommendations.py | saleweaver/python-amazon-ad-api | 55fca99167ea918d34d5c9e666a4107f10b348a6 | [
"MIT"
] | 12 | 2021-11-06T03:46:59.000Z | 2022-03-11T19:09:58.000Z | ad_api/api/sb/bid_recommendations.py | saleweaver/python-amazon-ad-api | 55fca99167ea918d34d5c9e666a4107f10b348a6 | [
"MIT"
] | 6 | 2021-09-19T00:47:57.000Z | 2022-01-11T13:55:44.000Z | from ad_api.base import Client, sp_endpoint, fill_query_params, ApiResponse
class BidRecommendations(Client):
    """Sponsored Brands bid-recommendation operations.

    Part of the Amazon Advertising API for Sponsored Brands, which covers
    campaign, ad group, keyword, negative keyword, draft, Store, landing
    page, and Brand management. See the Sponsored Brands Support Center for
    product details and the account setup topic for onboarding information.
    """

    @sp_endpoint('/sb/recommendations/bids', method='POST')
    def get_bid_recommendations(self, **kwargs) -> ApiResponse:
        r"""
        Fetch bid recommendation objects for a list of keywords or products.

        Request body fields:

        | '**campaignId**': *integer($int64)*, identifier of the campaign the recommendations are created for.
        | '**targets**': targeting expressions where '**type**' is one of [ asinCategorySameAs, asinBrandSameAs, asinPriceLessThan, asinPriceBetween, asinPriceGreaterThan, asinReviewRatingLessThan, asinReviewRatingBetween, asinReviewRatingGreaterThan, asinSameAs ] and '**value**' is the expression text (a ``-`` token defines a range, e.g. 2-4 means 2, 3 and 4).
        | '**keywords**': SBBidRecommendationKeyword entries with '**matchType**' in [ broad, exact, phrase ] and '**keywordText**' (maximum of 10 words).
        | '**adFormat**': *integer($int64)*, one of [ productCollection, video ].

        Returns:
            ApiResponse
        """
        # The decorator injects 'path'; the caller supplies 'body'; everything
        # else is forwarded as query parameters.
        endpoint = kwargs.pop('path')
        payload = kwargs.pop('body')
        return self._request(endpoint, data=payload, params=kwargs)
470602cdea3451a6105b9f20948c550da2428a6d | 1,649 | py | Python | rrt.py | dixantmittal/vanilla-rrt-and-variants | 3affcef78491a2a54dd6129470bb17b2ce016c3f | [
"Apache-2.0"
] | 10 | 2019-06-04T17:10:56.000Z | 2022-03-21T02:41:35.000Z | rrt.py | etarakci-hvl/fast-rrt-star | 3affcef78491a2a54dd6129470bb17b2ce016c3f | [
"Apache-2.0"
] | null | null | null | rrt.py | etarakci-hvl/fast-rrt-star | 3affcef78491a2a54dd6129470bb17b2ce016c3f | [
"Apache-2.0"
] | 5 | 2020-04-07T02:11:33.000Z | 2022-03-04T07:48:05.000Z | import numpy as np
import networkx as nx
from commons import *
from tqdm import tqdm
def apply_rrt(state_space, starting_state, target_space, obstacle_map, granularity=0.1, d_threshold=0.5,
              n_samples=1000, find_optimal=True):
    """Grow a rapidly-exploring random tree from *starting_state*.

    Samples up to *n_samples* points; each collision-free candidate within
    *state_space* is attached to the tree with its Euclidean edge weight.
    When a candidate lands in *target_space* its path cost (Dijkstra from
    the root) is evaluated; with find_optimal=False the first hit is kept,
    otherwise the cheapest goal state found so far is retained.

    Returns (tree, final_state) where final_state is None if the target was
    never reached.
    """
    tree = nx.DiGraph()
    tree.add_node(starting_state)
    best_state, best_cost = None, None

    for _ in tqdm(range(n_samples)):
        # Pick a tree node to grow from, then steer towards the random point.
        parent, random_point = select_node_to_expand(tree, state_space)
        candidate = sample_new_point(parent, random_point, d_threshold)

        # Reject candidates outside the state space or behind an obstacle.
        if not lies_in_area(candidate, state_space):
            continue
        if not is_collision_free(parent, candidate, obstacle_map, granularity):
            continue

        edge_weight = cartesian_distance(parent, candidate)
        tree.add_weighted_edges_from([(parent, candidate, edge_weight)])

        if not lies_in_area(candidate, target_space):
            continue

        # Candidate reached the target: score it against the best so far.
        cost = nx.dijkstra_path_length(tree, starting_state, candidate)
        if best_state is None:
            best_state, best_cost = candidate, cost
            if not find_optimal:
                break
        elif cost < best_cost:
            best_state, best_cost = candidate, cost

    if best_state is None:
        print("Target not reached.")
    return tree, best_state
| 33.653061 | 104 | 0.624621 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 276 | 0.167374 |
4706b5d1ad0c266a57f9f225693953bcaf8bb2a5 | 1,792 | py | Python | migrations/versions/b829c4a4c128_add_sign_hash_table.py | rivendale/ethsigns | 8ffecc264a54b00d5615c6fa36c9f661dce468b5 | [
"MIT"
] | 1 | 2021-01-24T04:27:57.000Z | 2021-01-24T04:27:57.000Z | migrations/versions/b829c4a4c128_add_sign_hash_table.py | rivendale/ethsigns | 8ffecc264a54b00d5615c6fa36c9f661dce468b5 | [
"MIT"
] | 19 | 2021-02-07T18:24:31.000Z | 2021-07-02T08:03:56.000Z | migrations/versions/b829c4a4c128_add_sign_hash_table.py | rivendale/ethsigns | 8ffecc264a54b00d5615c6fa36c9f661dce468b5 | [
"MIT"
] | null | null | null | """add-sign-hash-table
Revision ID: b829c4a4c128
Revises: cc5dce03ad39
Create Date: 2021-05-25 16:04:18.028626
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b829c4a4c128'  # unique id of this migration
down_revision = 'cc5dce03ad39'  # parent revision this migration applies on top of
branch_labels = None  # no named branches
depends_on = None  # no cross-branch dependencies
def upgrade():
    """Replace the legacy user_signs join table with sign_hash / user_sign_hashes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('sign_hash',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('signhash', sa.String(length=500), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Unique index so each hash value is stored at most once.
    with op.batch_alter_table('sign_hash', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_sign_hash_signhash'), ['signhash'], unique=True)

    # Many-to-many join table linking metamask users to sign hashes.
    op.create_table('user_sign_hashes',
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('sign_hash', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['sign_hash'], ['sign_hash.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['metamask_user.id'], )
    )
    # The old user<->zodiac join table is superseded by user_sign_hashes.
    op.drop_table('user_signs')
    # ### end Alembic commands ###
def downgrade():
    """Reverse of upgrade(): restore user_signs and drop the sign-hash tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user_signs',
    sa.Column('user_id', sa.BIGINT(), autoincrement=False, nullable=True),
    sa.Column('sign_id', sa.BIGINT(), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint(['sign_id'], ['zodiacs.id'], name='user_signs_sign_id_fkey'),
    sa.ForeignKeyConstraint(['user_id'], ['metamask_user.id'], name='user_signs_user_id_fkey')
    )
    op.drop_table('user_sign_hashes')
    # Drop the unique index before dropping the table itself.
    with op.batch_alter_table('sign_hash', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_sign_hash_signhash'))

    op.drop_table('sign_hash')
    # ### end Alembic commands ###
| 33.811321 | 94 | 0.695871 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 732 | 0.408482 |
4708d3e35d8cf6c31327e57cf3dc0a3ada86640d | 3,675 | py | Python | tests/util/test_json_stream.py | MatthiasLohr/bfebench | baca2e18a9c24282ecda99dccfd134fab4c223b3 | [
"Apache-2.0"
] | null | null | null | tests/util/test_json_stream.py | MatthiasLohr/bfebench | baca2e18a9c24282ecda99dccfd134fab4c223b3 | [
"Apache-2.0"
] | null | null | null | tests/util/test_json_stream.py | MatthiasLohr/bfebench | baca2e18a9c24282ecda99dccfd134fab4c223b3 | [
"Apache-2.0"
] | null | null | null | # This file is part of the Blockchain-based Fair Exchange Benchmark Tool
# https://gitlab.com/MatthiasLohr/bfebench
#
# Copyright 2021-2022 Matthias Lohr <mail@mlohr.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from shutil import rmtree
from tempfile import mkdtemp
from time import sleep
from unittest import TestCase
from bfebench.utils.json_stream import (
JsonObjectSocketStreamForwarder,
JsonObjectUnixDomainSocketClientStream,
JsonObjectUnixDomainSocketServerStream,
)
class JsonObjectSocketStream(TestCase):
    """End-to-end tests for JSON object exchange over a Unix domain socket."""

    def setUp(self) -> None:
        # Fresh temporary directory per test to host the socket file.
        self._tmp_dir = mkdtemp(prefix="bfebench-test-")

    def tearDown(self) -> None:
        rmtree(self._tmp_dir)

    @property
    def tmp_dir(self) -> str:
        """Path of the per-test temporary directory."""
        return self._tmp_dir

    def test_server_client(self) -> None:
        """Exchange flat and nested JSON objects in both directions."""
        # init
        server = JsonObjectUnixDomainSocketServerStream(os.path.join(self._tmp_dir, "socket"))
        client = JsonObjectUnixDomainSocketClientStream(os.path.join(self._tmp_dir, "socket"))

        # provide time to set up the server
        sleep(0.1)

        # send message from client to server
        client.send_object({"foo": "bar"})
        received, bytes_count = server.receive_object()
        self.assertEqual(received, {"foo": "bar"})
        # 14 = byte length of the serialized message on the wire.
        self.assertEqual(bytes_count, 14)

        # send message from server to client
        server.send_object({"reply": 42})
        received, bytes_count = client.receive_object()
        self.assertEqual(received, {"reply": 42})
        self.assertEqual(bytes_count, 13)

        # send nested message from client to server
        nested_test_data = {
            "list": ["a", "b"],
            "object": {"foo": "bar"},
            "list_with_objects": [{"a": 1}, {"b": 2}],
            "object_with_lists": {"a": [1, 2], "b": [3, 4]},
            "object_with_objects": {"a": {"foo": "bar"}},
        }

        client.send_object(nested_test_data)
        received, bytes_count = server.receive_object()
        self.assertEqual(nested_test_data, received)
class JsonObjectSocketStreamForwarderTest(TestCase):
    """Tests for JsonObjectSocketStreamForwarder relaying between two sockets."""

    def setUp(self) -> None:
        # Fresh temporary directory per test to host the socket files.
        self._tmp_dir = mkdtemp(prefix="bfebench-test-")

    def tearDown(self) -> None:
        rmtree(self._tmp_dir, ignore_errors=True)

    @property
    def tmp_dir(self) -> str:
        """Path of the per-test temporary directory."""
        return self._tmp_dir

    def test_forward(self) -> None:
        """A message sent into socket s1 is forwarded to socket s2 and counted."""
        s1 = JsonObjectUnixDomainSocketServerStream(os.path.join(self._tmp_dir, "s1"))
        s2 = JsonObjectUnixDomainSocketServerStream(os.path.join(self._tmp_dir, "s2"))

        forwarder = JsonObjectSocketStreamForwarder(s1, s2)
        forwarder.start()

        # Give the server sockets / forwarder time to come up.
        sleep(0.1)

        c1 = JsonObjectUnixDomainSocketClientStream(os.path.join(self._tmp_dir, "s1"))
        c2 = JsonObjectUnixDomainSocketClientStream(os.path.join(self._tmp_dir, "s2"))

        sleep(0.1)

        c1.send_object({"foo": "bar"})
        received, bytes_count = c2.receive_object()
        self.assertEqual(received, {"foo": "bar"})
        self.assertEqual(bytes_count, 14)

        # Forwarder statistics should reflect exactly one 1->2 message.
        stats = forwarder.get_stats()
        self.assertEqual(stats.count_1to2, 1)
        self.assertEqual(stats.bytes_1to2, 14)
| 34.345794 | 94 | 0.670476 | 2,662 | 0.724354 | 0 | 0 | 136 | 0.037007 | 0 | 0 | 1,091 | 0.296871 |
4708f606f6729a7f4233abfc1e9dcd8087cb2f67 | 196 | py | Python | python-objects_statements_and_data_structures-practice-master/even_in_range.py | valcal/python_practice | bbc9b7075a5af7b7461afa322acb8c946bad1024 | [
"MIT"
] | null | null | null | python-objects_statements_and_data_structures-practice-master/even_in_range.py | valcal/python_practice | bbc9b7075a5af7b7461afa322acb8c946bad1024 | [
"MIT"
] | null | null | null | python-objects_statements_and_data_structures-practice-master/even_in_range.py | valcal/python_practice | bbc9b7075a5af7b7461afa322acb8c946bad1024 | [
"MIT"
] | null | null | null | """
EVEN IN RANGE: Use range() to print all the even numbers from 0 to 10.
"""
for number in range(11):
if number % 2 == 0:
if number == 0:
continue
print(number)
| 19.6 | 70 | 0.55102 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.397959 |
470a71e16d544812e51ec3558f13c543292f57e3 | 935 | py | Python | DummyTile.py | godesab/minesweep | 06396f5be237438411a4df6370b739d0d38e89f3 | [
"MIT"
] | null | null | null | DummyTile.py | godesab/minesweep | 06396f5be237438411a4df6370b739d0d38e89f3 | [
"MIT"
] | null | null | null | DummyTile.py | godesab/minesweep | 06396f5be237438411a4df6370b739d0d38e89f3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
class DummyTile:
    """Minimal tile record used by the minesweeper solver.

    Tracks the tile's position, its revealed label (adjacent mine count),
    and bookkeeping counters about neighbouring tiles.
    """

    def __init__(self, pos, label=None):
        # Board coordinates of this tile.
        self.pos = pos
        # Label tells adjacent mine count for this tile: int in 0..8, or
        # None while still unknown.
        self.label = label
        # Reveal / flag state.
        self.checked = False
        self.marked = False
        # Neighbour bookkeeping: mine count (unknown), total neighbours,
        # revealed neighbours, unrevealed neighbours (unknown).
        self.adj_mines = None
        self.adj_tiles = 0
        self.adj_checked = 0
        self.adj_unchecked = None

    def set_label(self, label):
        """Record the revealed label and mark this tile as checked."""
        self.label = label
        self.checked = True

    def set_adj_mines(self, count):
        """Record the adjacent-mine count and mark this tile as checked."""
        self.adj_mines = count
        self.checked = True

    def set_adj_tiles(self, count):
        """Record how many neighbouring tiles exist."""
        self.adj_tiles = count

    def set_adj_checked(self, count):
        """Record how many neighbouring tiles are already checked."""
        self.adj_checked = count

    def add_adj_checked(self):
        """Increment the checked-neighbour counter by one."""
        self.adj_checked += 1

    def set_adj_unchecked(self, count):
        """Record how many neighbouring tiles are still unchecked."""
        self.adj_unchecked = count

    def mark(self):
        """Flag this tile (e.g. as a suspected mine)."""
        self.marked = True
| 23.974359 | 93 | 0.605348 | 908 | 0.971123 | 0 | 0 | 0 | 0 | 0 | 0 | 108 | 0.115508 |
470b82a1389643eded862261d0f38a66e716c512 | 4,336 | py | Python | ameilisearch/_httprequests.py | SaidBySolo/ameilisearch | 017883b6c480e2e3633b8474060387c59588f357 | [
"MIT"
] | null | null | null | ameilisearch/_httprequests.py | SaidBySolo/ameilisearch | 017883b6c480e2e3633b8474060387c59588f357 | [
"MIT"
] | null | null | null | ameilisearch/_httprequests.py | SaidBySolo/ameilisearch | 017883b6c480e2e3633b8474060387c59588f357 | [
"MIT"
] | null | null | null | import json
from typing import Any, Dict, List, Optional, Type, Union
from types import TracebackType
from aiohttp.client_reqrep import ClientResponse
try:
from typing import Literal
except ImportError:
from typing_extensions import Literal
from aiohttp.client import ClientConnectionError, ClientResponseError, ClientSession
from aiohttp.client_exceptions import ServerTimeoutError
from ameilisearch.config import Config
from ameilisearch.errors import (
MeiliSearchApiError,
MeiliSearchCommunicationError,
MeiliSearchTimeoutError,
)
class HttpRequests:
    """Thin async HTTP layer for talking to the MeiliSearch API via aiohttp.

    Serializes request bodies to JSON (raw ``bytes`` bodies are sent as-is),
    validates responses, and translates transport failures into
    MeiliSearch-specific exceptions. Usable as an async context manager to
    ensure the underlying session is closed.
    """

    def __init__(self, config: Config) -> None:
        self.config = config
        # Base headers sent with every request; send_request may add a
        # Content-Type entry per request.
        self.headers = {
            "Authorization": f"Bearer {self.config.api_key}",
        }
        # Created lazily on first request so this object can be constructed
        # outside a running event loop.
        self.session: Optional[ClientSession] = None

    async def send_request(
        self,
        method: Literal["GET", "POST", "PUT", "PATCH", "DELETE"],
        path: str,
        body: Optional[
            Union[Dict[str, Any], List[Dict[str, Any]], List[str], str]
        ] = None,
        content_type: Optional[str] = None,
    ) -> Any:
        """Issue *method* against *path* and return the decoded response.

        Raises MeiliSearchTimeoutError on timeouts,
        MeiliSearchCommunicationError on connection failures, and
        MeiliSearchApiError (via __validate) on HTTP error statuses.
        """
        if not self.session or self.session.closed:
            self.session = ClientSession()

        if content_type:
            # NOTE(review): this mutates self.headers, so a Content-Type set
            # here persists for later requests on this instance — confirm
            # that is intended.
            self.headers["Content-Type"] = content_type
        self.headers = {k: v for k, v in self.headers.items() if v is not None}

        try:
            request_path = self.config.url + "/" + path
            # Raw byte payloads (e.g. CSV/NDJSON uploads) are sent untouched;
            # everything else is JSON-encoded.
            if isinstance(body, bytes):
                data = body
            else:
                data = json.dumps(body) if body else None
            response = await self.session.request(
                method,
                request_path,
                timeout=self.config.timeout,
                headers=self.headers,
                data=data,
            )
            return await self.__validate(response)
        except ServerTimeoutError as err:
            raise MeiliSearchTimeoutError(str(err)) from err
        except ClientConnectionError as err:
            raise MeiliSearchCommunicationError(str(err)) from err

    async def get(self, path: str) -> Any:
        """Issue a GET request."""
        return await self.send_request("GET", path)

    async def post(
        self,
        path: str,
        body: Optional[
            Union[Dict[str, Any], List[Dict[str, Any]], List[str], str]
        ] = None,
        content_type: Optional[str] = "application/json",
    ) -> Any:
        """Issue a POST request with an optional JSON body."""
        return await self.send_request("POST", path, body, content_type)

    async def patch(
        self,
        path: str,
        body: Optional[Union[Dict[str, Any], List[Dict[str, Any]], List[str], str]] = None,
        content_type: Optional[str] = 'application/json',
    ) -> Any:
        """Issue a PATCH request with an optional JSON body.

        BUG FIX: previously returned the un-awaited coroutine from
        send_request, so the request was never actually executed.
        """
        return await self.send_request("PATCH", path, body, content_type)

    async def put(
        self,
        path: str,
        body: Optional[Union[Dict[str, Any], List[Dict[str, Any]], List[str]]] = None,
        content_type: Optional[str] = "application/json",
    ) -> Any:
        """Issue a PUT request with an optional JSON body."""
        return await self.send_request("PUT", path, body, content_type)

    async def delete(
        self,
        path: str,
        body: Optional[Union[Dict[str, Any], List[Dict[str, Any]], List[str]]] = None,
    ) -> Any:
        """Issue a DELETE request with an optional JSON body."""
        return await self.send_request("DELETE", path, body)

    @staticmethod
    async def __to_json(content: bytes, request: ClientResponse) -> Any:
        """Decode *content* as JSON; empty bodies yield the raw response."""
        if content == b"":
            return request
        return json.loads(content)

    @staticmethod
    async def __validate(request: ClientResponse) -> Any:
        """Raise MeiliSearchApiError for HTTP errors, else decode the body."""
        content = await request.content.read()
        try:
            request.raise_for_status()
            return await HttpRequests.__to_json(content, request)
        except ClientResponseError as err:
            raise MeiliSearchApiError(str(err), content, request.status) from err

    async def __aenter__(self):
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ):
        # Close the lazily created session, if one was ever opened.
        if self.session:
            await self.session.close()
| 32.848485 | 91 | 0.594788 | 3,775 | 0.870618 | 0 | 0 | 536 | 0.123616 | 3,433 | 0.791744 | 182 | 0.041974 |
470c4a8d64bc83e0bb784d92aef60d7c0b20c194 | 2,711 | py | Python | cuida24/backend/cuida24/services_habits.py | afonscosta/LEI_1819 | e25641cc46d06ab25e54f3366adbbe02496aedcb | [
"MIT"
] | null | null | null | cuida24/backend/cuida24/services_habits.py | afonscosta/LEI_1819 | e25641cc46d06ab25e54f3366adbbe02496aedcb | [
"MIT"
] | 11 | 2019-12-04T23:42:06.000Z | 2022-02-10T08:21:13.000Z | cuida24/backend/cuida24/services_habits.py | afonscosta/LEI_1819 | e25641cc46d06ab25e54f3366adbbe02496aedcb | [
"MIT"
] | 1 | 2019-06-26T16:49:31.000Z | 2019-06-26T16:49:31.000Z | from django.utils import timezone
from django.shortcuts import get_object_or_404
from backend.cuida24.serializers import *
logger = logging.getLogger("mylogger")
def habitsFrontToBackJSON(request_data, user):
    """Augment a frontend habits payload with the caregiver PK for *user*.

    Mutates and returns *request_data*. Raises Http404 (via
    get_object_or_404) when no Caregiver row references this user.
    """
    request_data['caregiver'] = get_object_or_404(Caregiver, info=user.pk).pk
    return request_data
def SOSFrontToBackJSON(request_data, user):
    """Augment a frontend SOS payload with caregiver and patient PKs.

    Mutates and returns *request_data*. Raises Http404 when the user has no
    Caregiver record or the caregiver has no Patient record.
    """
    request_data['caregiver'] = get_object_or_404(Caregiver, info=user.pk).pk
    request_data['patient'] = get_object_or_404(Patient, caregiver=request_data['caregiver']).pk
    return request_data
def getGoals(caregiver):
    """Return progress data for every enabled goal currently in its window.

    For each non-disabled Goal whose [dateBegin, dateEnd] window contains
    "now", counts the caregiver's matching records in that window and maps
    the goal type code to {'type': label, 'realized': count, 'goal': target}.
    """
    date_now = timezone.now()
    goals = Goal.objects.filter(disable=False)
    # Mapping of goal type code -> human-readable label from the model choices.
    choices_value = dict(Goal.TYPE)
    return_data = {}
    for goal in goals:
        dateB = goal.dateBegin
        dateE = goal.dateEnd
        logger.info(goal.dateBegin)
        logger.info(date_now)
        logger.info(goal.dateEnd)
        # Only report goals whose active window covers the current time.
        if dateB <= date_now <= dateE:
            realized = 0
            # Each branch counts records of the model matching the goal type
            # within the goal's date window.
            if goal.type == 'AF' or goal.type == 'LS' or goal.type == 'LI':
                realized = Activity.objects.filter(type=goal.type, caregiver=caregiver,
                                                   date__range=(dateB, dateE)).count()
            if goal.type == 'WT':
                realized = Water.objects.filter(caregiver=caregiver,
                                                date__range=(dateB, dateE)).count()
            if goal.type == 'NP':
                realized = Nap.objects.filter(caregiver=caregiver,
                                              date__range=(dateB, dateE)).count()
            if goal.type == 'SP':
                realized = Sleep.objects.filter(caregiver=caregiver,
                                                date__range=(dateB, dateE)).count()
            if goal.type == 'SS':
                realized = SOS.objects.filter(caregiver=caregiver,
                                              date__range=(dateB, dateE)).count()
            if goal.type == 'PA' or goal.type == 'LM' or goal.type == 'AL' or goal.type == 'LT' or goal.type == 'JT':
                realized = Meal.objects.filter(type=goal.type, caregiver=caregiver,
                                               date__range=(dateB, dateE)).count()
            # NOTE(review): 'AL' appears both above (meal type) and below
            # (food), so for 'AL' this food-based count overwrites the
            # meal-type count — confirm which is intended.
            if goal.type == 'CB' or goal.type == 'FT' or goal.type == 'VG' or goal.type == 'FB' or goal.type == 'PC' or goal.type == 'RF' or goal.type == 'AL':
                realized = Meal.objects.filter(food=goal.type, caregiver=caregiver,
                                               date__range=(dateB, dateE)).count()
            return_data[str(goal.type)] = {'type': choices_value[goal.type], 'realized': realized, 'goal': goal.goal}
    return return_data
| 49.290909 | 159 | 0.565474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 150 | 0.05533 |
470c68bee171c558b2fa27895830521e28ccc585 | 7,758 | py | Python | tests/acceptance/features/component/environment.py | AlvaroVega/fiware-facts | 6224dd8d87c361bde5eb87f1d9d61a386c5632a8 | [
"Apache-2.0"
] | 1 | 2017-11-28T14:52:49.000Z | 2017-11-28T14:52:49.000Z | tests/acceptance/features/component/environment.py | AlvaroVega/fiware-facts | 6224dd8d87c361bde5eb87f1d9d61a386c5632a8 | [
"Apache-2.0"
] | 70 | 2015-01-26T17:32:39.000Z | 2018-08-15T14:28:19.000Z | tests/acceptance/features/component/environment.py | AlvaroVega/fiware-facts | 6224dd8d87c361bde5eb87f1d9d61a386c5632a8 | [
"Apache-2.0"
] | 9 | 2015-01-22T09:29:19.000Z | 2020-03-05T19:27:56.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is parpt of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
#
__author__ = "@jframos"
from qautils.logger.logger_utils import get_logger
from qautils.configuration.configuration_utils import set_up_project
from fiwarefacts_client.client import FactsClient
from fiwarecloto_client.client import ClotoClient
from commons.rabbit_utils import RabbitMQConsumer, RabbitMQPublisher
import qautils.configuration.configuration_utils as configuration_utils
from fiwarefacts_client.window_size_model_utils import get_window_size_rabbitmq_message
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_PROTOCOL, \
PROPERTIES_CONFIG_SERVICE_RESOURCE, PROPERTIES_CONFIG_SERVICE_PORT, PROPERTIES_CONFIG_SERVICE_HOST, \
PROPERTIES_CONFIG_SERVICE_OS_USERNAME, PROPERTIES_CONFIG_SERVICE_OS_PASSWORD, \
PROPERTIES_CONFIG_SERVICE_OS_TENANT_ID, PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL, PROPERTIES_CONFIG_SERVICE_USER, \
PROPERTIES_CONFIG_SERVICE_PASSWORD
from commons.constants import * # All custom constants are used in this file.
import time
__logger__ = get_logger(__name__)
def before_all(context):
    """Behave hook: load project configuration and build the REST clients.

    Stores the configured tenant id and FACTS/CLOTO clients on *context*
    for use by all scenarios.
    """
    __logger__.info("START ...")
    __logger__.info("Setting UP acceptance test project ")
    set_up_project()  # Load setting using 'qautils.configuration.configuration_utils'

    # Save tenantId
    context.tenant_id = \
        configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_OS_TENANT_ID]

    # Create REST Clients
    context.facts_client = FactsClient(
        protocol=configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE][PROPERTIES_CONFIG_SERVICE_PROTOCOL],
        host=configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE][PROPERTIES_CONFIG_SERVICE_HOST],
        port=configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE][PROPERTIES_CONFIG_SERVICE_PORT],
        resource=configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE][PROPERTIES_CONFIG_SERVICE_RESOURCE])

    context.cloto_client = ClotoClient(
        username=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_OS_USERNAME],
        password=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_OS_PASSWORD],
        tenant_id=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_OS_TENANT_ID],
        auth_url=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL],
        api_protocol=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_PROTOCOL],
        api_host=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_HOST],
        api_port=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_PORT],
        api_resource=configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE][PROPERTIES_CONFIG_SERVICE_RESOURCE])
def before_feature(context, feature):
    """Behave hook: log the start of each feature."""
    __logger__.info("=========== START FEATURE =========== ")
    __logger__.info("Feature name: %s", feature.name)
def before_scenario(context, scenario):
    """Behave hook: reset scenario state and wire up RabbitMQ consumer/publisher.

    Creates one consumer for the main tenant (test cases may add more for
    multi-tenancy) and a publisher used to push the default FACTS window
    size for the configured tenant.
    """
    __logger__.info("********** START SCENARIO **********")
    __logger__.info("Scenario name: %s", scenario.name)

    # Clean scenario variables
    context.context_elements = dict()
    context.response = None

    # List of RabbitMQ Consumers for testing purposes. This list is necessary to be used as Multi-Tenancy test cases.
    # By default, this list only will have information for the main tenant used in test cases. Additional RabbitMQ
    # consumers should be added by each test case if they are needed.
    context.rabbitmq_consumer_list = list()

    # Init RabbitMQ consumer and append it on the list - Main tenantId
    context.rabbitmq_consumer = RabbitMQConsumer(
        amqp_host=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_HOST],
        amqp_port=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_PORT],
        amqp_user=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_USER],
        amqp_password=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_PASSWORD])
    # Exchange configuration for the facts messages the consumer listens to.
    facts_message_config = \
        configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES]
    context.rabbitmq_consumer.exchange = \
        facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME]
    context.rabbitmq_consumer.exchange_type = \
        facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE]
    context.rabbitmq_consumer_list.append(context.rabbitmq_consumer)

    # Init RabbitMQ publisher
    context.rabbitmq_publisher = RabbitMQPublisher(
        amqp_host=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_HOST],
        amqp_port=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_PORT],
        amqp_user=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_USER],
        amqp_password=configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_SERVICE_PASSWORD])
    # Exchange/routing-key configuration for window-size messages.
    facts_window_size_config = \
        configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE][PROPERTIES_CONFIG_RABBITMQ_SERVICE_WINDOW_SIZE]
    context.rabbitmq_publisher.exchange = \
        facts_window_size_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME]
    context.rabbitmq_publisher.routing_key = \
        facts_window_size_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_ROUTING_KEY]

    # Set default window size to 2 (FACTS), for the main tenantId configured
    message = get_window_size_rabbitmq_message(context.tenant_id, FACTS_DEFAULT_WINDOW_SIZE)
    context.rabbitmq_publisher.send_message(message)
def after_scenario(context, scenario):
    """Behave hook: tear down RabbitMQ endpoints and wait out the grace period."""
    __logger__.info("********** END SCENARIO **********")

    # Close all RabbitMQ consumers (if initiated)
    for consumer in context.rabbitmq_consumer_list:
        try:
            consumer.stop()
        except Exception:
            __logger__.warn("Rabbitmq consumer was already stopped")

        try:
            consumer.close_connection()
        except Exception:
            __logger__.warn("Rabbitmq consumer was already closed connection")

    # Close RabbitMQ publisher (if initiated)
    context.rabbitmq_publisher.close()

    # Wait for grace period defined (FACTS component) to delete all stored facts.
    grace_period = \
        configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE][PROPERTIES_CONFIG_FACTS_SERVICE_GRACE_PERIOD]
    __logger__.info("Explicit wait for FACTS grace period: %d seconds", grace_period)
    time.sleep(grace_period)
def after_feature(context, feature):
    """Behave hook: log the end of each feature."""
    __logger__.info("=========== END FEATURE =========== ")
def after_all(context):
    """Behave hook: log the end of the whole test run."""
    __logger__.info("... END :)")
| 45.905325 | 121 | 0.792472 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,956 | 0.252062 |
470cfac6bd7f23ed4cc1c90ca9f96df0ee2b0c3e | 1,791 | py | Python | savu/plugins/azimuthal_integrators/pyfai_azimuthal_integrator.py | malte-storm/Savu | 16291e8a22464c50c511af01fbc648860c1236e6 | [
"Apache-2.0"
] | 1 | 2021-04-18T09:30:54.000Z | 2021-04-18T09:30:54.000Z | savu/plugins/azimuthal_integrators/pyfai_azimuthal_integrator.py | malte-storm/Savu | 16291e8a22464c50c511af01fbc648860c1236e6 | [
"Apache-2.0"
] | 1 | 2019-07-30T12:31:51.000Z | 2019-07-30T12:31:51.000Z | savu/plugins/azimuthal_integrators/pyfai_azimuthal_integrator.py | malte-storm/Savu | 16291e8a22464c50c511af01fbc648860c1236e6 | [
"Apache-2.0"
] | 1 | 2021-05-20T16:31:29.000Z | 2021-05-20T16:31:29.000Z | # Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: pyfai_azimuthal_integrator
:platform: Unix
:synopsis: A plugin to integrate azimuthally "symmetric" signals i.e. SAXS,\
WAXS or XRD.Requires a calibration file
.. moduleauthor:: Aaron D. Parsons <scientificsoftware@diamond.ac.uk>
"""
import logging
from savu.plugins.azimuthal_integrators.base_azimuthal_integrator import \
BaseAzimuthalIntegrator
from savu.plugins.utils import register_plugin
@register_plugin
class PyfaiAzimuthalIntegrator(BaseAzimuthalIntegrator):
    """
    1D azimuthal integrator by pyFAI
    :param use_mask: Should we mask. Default: False.
    :param num_bins: number of bins. Default: 1005.
    """
    # NOTE(review): the docstring above appears to be parsed by the Savu
    # framework to expose plugin parameters -- do not reformat it; confirm
    # against savu.plugins.utils.register_plugin.
    def __init__(self):
        logging.debug("Starting 1D azimuthal integrationr")
        super(PyfaiAzimuthalIntegrator,
              self).__init__("PyfaiAzimuthalIntegrator")
    def process_frames(self, data):
        # Azimuthally integrate one frame into a 1D q-profile.
        logging.debug("Running azimuthal integration")
        # self.params is prepared by the base class; presumably index 3 is
        # the pyFAI integrator object -- confirm in BaseAzimuthalIntegrator
        ai = self.params[3]
        logging.debug('datashape=%s' % str(data[0].shape))
        # integrate1d returns (radial axis in q_A^-1, binned intensities);
        # only the intensities are returned to the framework
        axis, remapped = \
            ai.integrate1d(data=data[0], npt=self.npts, unit='q_A^-1',
                           correctSolidAngle=False)
        return remapped
| 33.792453 | 79 | 0.712451 | 749 | 0.418202 | 0 | 0 | 766 | 0.427694 | 0 | 0 | 1,108 | 0.618649 |
470eba693eedc484f3528cebad4ea8fde2a80a34 | 47 | py | Python | Exercise/t4.py | Twenkid/Python-Various | cb6d704724d0e0325cf05a2f95b08cc892ff0857 | [
"MIT"
] | null | null | null | Exercise/t4.py | Twenkid/Python-Various | cb6d704724d0e0325cf05a2f95b08cc892ff0857 | [
"MIT"
] | null | null | null | Exercise/t4.py | Twenkid/Python-Various | cb6d704724d0e0325cf05a2f95b08cc892ff0857 | [
"MIT"
] | null | null | null | #t4.py
def kaka(a):
    """Print ``a*465546 + 2342`` to stdout (exercise helper)."""
    value = a * 465546 + 2342
    print(value)
470f7eeda75283f4e7198eb5c53e3a28834ff646 | 1,227 | py | Python | config.py | paris3200/flask-inventory | fe858e5c12a8e193590e978e573a3891dfde37de | [
"MIT"
] | 6 | 2016-05-29T05:24:39.000Z | 2021-12-02T14:23:36.000Z | config.py | paris3200/flask-inventory | fe858e5c12a8e193590e978e573a3891dfde37de | [
"MIT"
] | 20 | 2016-11-04T23:22:25.000Z | 2019-10-21T13:39:49.000Z | config.py | paris3200/flask-inventory | fe858e5c12a8e193590e978e573a3891dfde37de | [
"MIT"
] | 7 | 2016-03-27T12:19:33.000Z | 2019-11-18T23:11:23.000Z | # config.jinja2
import os
# Absolute directory containing this config module; anchors file-based
# settings below (dev SQLite database, pictures folder).
basedir = os.path.abspath(os.path.dirname(__file__))
class BaseConfig(object):
    """Base configuration shared by all environments.

    NOTE(review): the fallback SECRET_KEY below is committed to source
    control; real deployments should supply ``SECRET_KEY`` through the
    environment so the checked-in value is never used.
    """
    # Allow override via the environment; fall back to the historical
    # hard-coded value so existing setups keep working unchanged.
    SECRET_KEY = os.environ.get(
        'SECRET_KEY',
        '8abcd123352a91a90aecd86d3d0dc5a5844894c24338ad13639bc593fdb20330d67a')
    DEBUG = False
    BCRYPT_LOG_ROUNDS = 13  # bcrypt work factor (higher = slower, safer)
    WTF_CSRF_ENABLED = True  # CSRF protection on by default
    DEBUG_TB_ENABLED = False  # Flask-DebugToolbar off by default
    DEBUG_TB_INTERCEPT_REDIRECTS = False
class DevConfig(BaseConfig):
    """Development configuration (local SQLite, relaxed security)."""
    DEBUG = True
    # Single hashing round in development for fast logins/tests.
    BCRYPT_LOG_ROUNDS = 1
    WTF_CSRF_ENABLED = False
    # Local SQLite database file next to this config module.
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'dev.sqlite')
    PICTURES_FOLDER = os.path.join(basedir,'app','static','pictures')
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    DEBUG_TB_ENABLED = False
class TestingConfig(BaseConfig):
    """Testing configuration (fast hashing, CSRF off)."""
    DEBUG = True
    TESTING = True
    BCRYPT_LOG_ROUNDS = 1
    WTF_CSRF_ENABLED = False
    # No database path given -- presumably a throwaway/in-memory SQLite
    # database; confirm against the SQLAlchemy URI docs.
    SQLALCHEMY_DATABASE_URI = 'sqlite:///'
    DEBUG_TB_ENABLED = False
class ProductionConfig(BaseConfig):
    """Production configuration (PostgreSQL backend)."""
    # NOTE(review): production secret key is committed to source control;
    # it should be rotated and supplied via the environment instead.
    SECRET_KEY = 'ljasdfUgreKnyvZRm8R77FkkoS9ab5duf0ypmABtkWPk3ESf8DnbVVL5K84ssyUEeSA'
    DEBUG = False
    SQLALCHEMY_DATABASE_URI = 'postgresql:///inventory'
    DEBUG_TB_ENABLED = False
| 27.886364 | 87 | 0.731051 | 1,135 | 0.92502 | 0 | 0 | 0 | 0 | 0 | 0 | 354 | 0.288509 |
470fe23973530032b500905cb2a05ec7d7b4bff3 | 238 | py | Python | ensysmod/model/ts_capacity_max.py | NOWUM/EnSysMod | 18c8a2198db3510e667c1f0298d00a3dfcb0aab7 | [
"MIT"
] | 1 | 2021-12-10T19:41:01.000Z | 2021-12-10T19:41:01.000Z | ensysmod/model/ts_capacity_max.py | NOWUM/EnSysMod | 18c8a2198db3510e667c1f0298d00a3dfcb0aab7 | [
"MIT"
] | 83 | 2021-10-20T22:54:28.000Z | 2022-03-24T19:07:06.000Z | ensysmod/model/ts_capacity_max.py | NOWUM/EnSysMod | 18c8a2198db3510e667c1f0298d00a3dfcb0aab7 | [
"MIT"
] | null | null | null | from sqlalchemy import Column, PickleType
from ensysmod.database.base_class import Base
from ensysmod.database.ref_base_class import RefCRBase
class CapacityMax(RefCRBase, Base):
    """SQLAlchemy model holding maximum-capacity data as a pickled blob.

    NOTE(review): ``RefCRBase`` presumably contributes the table name and
    reference/foreign-key columns -- confirm in
    ``ensysmod.database.ref_base_class``.
    """
    # Pickled capacity data; a value is required for every row.
    max_capacities = Column(PickleType, nullable=False)
| 26.444444 | 55 | 0.827731 | 91 | 0.382353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4710ee05a8d41a4dc8d0cb9674e01b883d964fbf | 1,262 | py | Python | ui/AterAuiNotebook.py | yeweiasia/ater | 7d3408adc576abc9dda4f29492f80460dce3f89a | [
"MIT"
] | 2 | 2018-11-22T15:12:32.000Z | 2018-12-12T06:19:29.000Z | ui/AterAuiNotebook.py | yeweiasia/ater | 7d3408adc576abc9dda4f29492f80460dce3f89a | [
"MIT"
] | null | null | null | ui/AterAuiNotebook.py | yeweiasia/ater | 7d3408adc576abc9dda4f29492f80460dce3f89a | [
"MIT"
] | 1 | 2018-12-12T06:19:33.000Z | 2018-12-12T06:19:33.000Z | import time
import wx.aui as aui
from SessionManager import SessionManager
class AterAuiNotebook(aui.AuiNotebook):
    """AUI notebook whose tabs host panels wired to a terminal backend.

    Each tab pairs a UI panel with a backend connection built from the
    panel's session credentials.
    """
    def __init__(self, parent):
        aui.AuiNotebook.__init__(self, parent=parent)
        self.parent = parent
        # Movable/splittable tabs with scroll buttons and per-tab close boxes
        self.default_style = aui.AUI_NB_TOP | aui.AUI_NB_TAB_SPLIT | aui.AUI_NB_TAB_MOVE | aui.AUI_NB_SCROLL_BUTTONS | aui.AUI_NB_CLOSE_ON_ALL_TABS
        self.SetWindowStyleFlag(self.default_style)
    def addPanel(self, label, panel, backend, selected):
        """Instantiate `panel`, add it as a tab, and hook up `backend`."""
        page = panel(self, None)
        # backendThread = threading.Thread(target=self.initBackendThread, args=(backend, page))
        # backendThread.start()
        self.AddPage(page, label, selected)
        self.initBackend(backend, page)
    # pass ui output method to backend thread
    def initBackend(self, backend, page):
        """Connect `page` to `backend`, prompting for a session if needed."""
        # init session if session is None
        if not page.session:
            # TO-DO: open session dialog to get connnection info
            page.session = SessionManager(self.parent)
            if len(page.session.sessionManagerDialog.auth_info.hostname) == 0:
                # user supplied no hostname -- abort without a backend
                return
        term = backend(page.writeText, page.session.sessionManagerDialog.auth_info)
        # let the page push keystrokes to the backend, and keep a reference
        page.sendToBackend = term.sendChar
        page.term = term
| 36.057143 | 147 | 0.692552 | 1,183 | 0.937401 | 0 | 0 | 0 | 0 | 0 | 0 | 233 | 0.184628 |
4711fa4a5f248f729dbb574084def1809fe1751a | 36,453 | py | Python | pyyeti/cla/_rptpct1.py | twmacro/pyye | c4febd44be836bd87368da13c1fb0cf82838b687 | [
"BSD-3-Clause"
] | null | null | null | pyyeti/cla/_rptpct1.py | twmacro/pyye | c4febd44be836bd87368da13c1fb0cf82838b687 | [
"BSD-3-Clause"
] | null | null | null | pyyeti/cla/_rptpct1.py | twmacro/pyye | c4febd44be836bd87368da13c1fb0cf82838b687 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Low level tool for writing percent difference reports. Typically, this
is called via: :func:`cla.DR_Results.rptpct`.
"""
from io import StringIO
from types import SimpleNamespace
import warnings
import numpy as np
import matplotlib.pyplot as plt
from pyyeti import ytools, locate, writer
from ._utilities import _get_rpt_headers, _get_numform, _proc_filterval
from ._magpct import magpct
__all__ = ["rptpct1"]
# FIXME: We need the str/repr formatting used in Numpy < 1.14.
# Numpy 1.14 changed array repr spacing; "legacy" mode keeps doctest
# output stable. Older numpy versions do not accept the keyword, hence
# the TypeError guard.
try:
    np.set_printoptions(legacy="1.13")
except TypeError:
    pass
def _apply_pv(value, pv, oldlen):
# if value has a len that's > 1, try to partition it down;
# otherwise, return it as is:
try:
n = len(value)
except TypeError:
return value
else:
if n == 1:
return value
# `value` is a vector with len > 1 ... ensure it is a true numpy
# array:
value = np.atleast_1d(value)
# oldlen is either 0 (for `value` vectors that are expected to be
# full size ... currently, only the `filterval` and
# `magpct_filterval` vectors), or it is the length of the
# dimension that the `value` index type of partition vector
# (currently, only the `ignorepv` vector) was originally defined
# to partition.
if oldlen == 0:
# `value` is `filterval` or `magpct_filterval` ... these just
# need to be partitioned down:
newvalue = value[pv]
else:
# `value` is `ignorepv` ... it needs to be redefined to
# correspond to reduced size:
truefalse = locate.index2bool(value, oldlen)
newvalue = truefalse[pv].nonzero()[0]
return newvalue
def _align_mxmn(mxmn1, mxmn2, labels2, row_number, infodct):
    """Align the two max/min data sets by their row labels.

    When both sets have labels and they differ, keep only the common rows
    (in matching order) and trim ``row_number`` and the filter/ignore
    entries in ``infodct`` (modified IN PLACE) to match. Returns the
    (possibly reduced) ``mxmn1, mxmn2, row_number``.
    """
    if infodct["labels"] and infodct["labels"] != labels2:
        n = len(infodct["labels"])
        pv1, pv2 = locate.list_intersect(infodct["labels"], labels2)
        mxmn1 = mxmn1[pv1]
        mxmn2 = mxmn2[pv2]
        infodct["labels"] = [infodct["labels"][i] for i in pv1]
        row_number = row_number[pv1]
        # filter values are full-length vectors (oldlen=0); ignorepv is an
        # index vector relative to the original n rows
        infodct["filterval"] = _apply_pv(infodct["filterval"], pv1, 0)
        infodct["magpct_filterval"] = _apply_pv(infodct["magpct_filterval"], pv1, 0)
        infodct["ignorepv"] = _apply_pv(infodct["ignorepv"], pv1, n)
    return mxmn1, mxmn2, row_number
def _get_filtline(filterval):
if len(filterval) > 1:
filtline = "Filter: <defined row-by-row>\n"
else:
filtline = f"Filter: {filterval[0]}\n"
return filtline
def _get_noteline(use_range, names, prtbads, flagbads):
noteline = "Notes: "
tab = " "
if not use_range:
noteline += "% Diff = +/- abs(({0}-{1})/{1})*100\n".format(*names)
else:
noteline += "% Diff = +/- abs({0}-{1})/max(abs({1}(max,min)))*100\n".format(
*names
)
noteline += tab + "Sign set such that positive % differences indicate exceedances\n"
prtbad, prtbadh, prtbadl = prtbads
flagbad, flagbadh, flagbadl = flagbads
if prtbad is not None or prtbadh is not None or prtbadl is not None:
if prtbad is not None:
prtbad = abs(prtbad)
noteline += tab + f"Printing rows where abs(% Diff) > {prtbad}%\n"
elif prtbadh is not None:
noteline += tab + f"Printing rows where % Diff > {prtbadh}%\n"
else:
noteline += tab + f"Printing rows where % Diff < {prtbadl}%\n"
if flagbad is not None or flagbadh is not None or flagbadl is not None:
if flagbad is not None:
flagbad = abs(flagbad)
noteline += tab + f"Flagging (*) rows where abs(% Diff) > {flagbad}%\n"
elif flagbadh is not None:
noteline += tab + f"Flagging (*) rows where % Diff > {flagbadh}%\n"
else:
noteline += tab + f"Flagging (*) rows where % Diff < {flagbadl}%\n"
return noteline
def _get_badpv(pct, pv, bad, badh, badl, defaultpv=False):
if bad is not None or badh is not None or badl is not None:
badpv = pv.copy()
if bad is not None:
badpv &= abs(pct) > bad
elif badh is not None:
badpv &= pct > badh
else:
badpv &= pct < badl
else:
badpv = np.empty(len(pct), bool)
badpv[:] = defaultpv
return badpv
def _get_pct_diff(a, b, filt, pv, nastring, mxmn_b=None, ismax=True, flagbads=None):
    """Compute signed percent differences between `a` and `b`.

    ``pv`` is updated IN PLACE: rows where neither |a| nor |b| exceeds
    the filter are turned off, and callers rely on that. The denominator
    is the row-wise abs-max of `mxmn_b` when given (range-based % diff),
    otherwise |b|. Returns ``(pct, spct)``: numeric % diffs (NaN for
    filtered rows) and 7-char strings with "*" appended for rows beyond
    the `flagbads` thresholds and `nastring` for filtered rows.
    """
    # either can pass filter to be kept:
    pv &= (abs(a) > filt) | (abs(b) > filt)
    if mxmn_b is not None:
        denom = np.nanmax(abs(mxmn_b), axis=1)
    else:
        denom = abs(b)
    # put 1's in for filtered values ... this is temporary
    a = a.copy()
    b = b.copy()
    a[~pv] = 1.0
    b[~pv] = 1.0
    # guard against divide-by-zero; zero-denominator rows get pct = 100
    z = denom == 0.0
    denom[z] = 1.0
    pct = 100 * abs(a - b) / denom
    pct[z] = 100.0  # np.inf
    # make less extreme values negative
    neg = a < b if ismax else a > b
    pct[neg] *= -1.0
    # put nan's in for the filtered or n/a rows:
    pct[~pv] = np.nan
    # make 7 char version:
    spct = [f"{p:7.2f}" for p in pct]
    badpv = _get_badpv(pct, pv, *flagbads, False)
    for j in badpv.nonzero()[0]:
        spct[j] += "*"
    for j in (~pv).nonzero()[0]:
        spct[j] = nastring
    return pct, spct
def _get_histogram_str(desc, hdr, pctinfo):
    """Format the histogram summary for one comparison column.

    Builds a string with the per-bin count/percent table, "within X%"
    roll-up lines, and the min/max/mean/stddev statistics for
    ``pctinfo['pct']``. `hdr` is the column label (max/min/abs-max).
    """
    pctcount = pctinfo["hsto"]
    s = [
        (f"\n\n {desc} - {hdr} Comparison Histogram\n\n"),
        (" % Diff Count Percent\n -------- -------- -------\n"),
    ]
    with StringIO() as f:
        writer.vecwrite(f, " {:8.2f} {:8.0f} {:7.2f}\n", pctcount)
        s.append(f.getvalue())
    s.append("\n")
    # total_percent_10 will either be 0 or 1000:
    # - 0 if all % diffs are "n/a"
    # - 1000 otherwise
    total_percent_10 = np.round(pctcount[:, 2].sum() * 10)
    last = -1.0
    for pdiff in [1, 2, 5, 10, 15, 20, 25, 50, 100, 500]:
        pvdiff = abs(pctcount[:, 0]) <= pdiff
        num = pctcount[pvdiff, 2].sum()
        # only print a line when it adds information beyond the last one
        if num > last:
            s.append(f" {num:.1f}% of values are within {pdiff}%\n")
        # stop once the cumulative percentage covers everything
        if np.round(num * 10) == total_percent_10:
            break
        last = num
    pct = pctinfo["pct"]
    n = len(pct)
    if n == 0:
        s.append(
            "\n % Diff Statistics: [Min, Max, Mean, StdDev]"
            " = [n/a, n/a, n/a, n/a]\n"
        )
    else:
        # sample standard deviation; 0.0 when only one value
        stddev = 0.0 if n <= 1 else pct.std(ddof=1)
        s.append(
            "\n % Diff Statistics: [Min, Max, Mean, StdDev]"
            f" = [{pct.min():.2f}, {pct.max():.2f}, {pct.mean():.4f}, {stddev:.4f}]\n"
        )
    return "".join(s)
def _proc_pct(
    ext1,
    ext2,
    filterval,
    magpct_filterval,
    *,
    names,
    mxmn1,
    comppv,
    mxmn_b,
    ismax,
    histogram_inc,
    prtbads,
    flagbads,
    numform,
    valhdr,
    maxhdr,
    minhdr,
    absmhdr,
    pdhdr,
    nastring,
    doabsmax,
    shortabsmax,
    print_info,
):
    """Process one comparison column (max, min, or abs-max).

    Computes % diffs of `ext1` vs `ext2`, the histogram, and the rows to
    print, and extends `print_info` IN PLACE with the table headers,
    formats, and data columns for this comparison. Returns a dict with
    keys ``pct, spct, hsto, prtpv, mag, magfilt`` (the per-column entry
    stored in ``pctinfo`` by the caller).
    """
    # handle magpct stuff here:
    mag = ext1[comppv], ext2[comppv]
    # per-row magpct filters must be trimmed to the compared rows
    if magpct_filterval is not None and len(magpct_filterval) > 1:
        magfilt = magpct_filterval[comppv]
    else:
        magfilt = magpct_filterval
    # _get_pct_diff updates pv in place; keep comppv intact
    pv = comppv.copy()
    pct, spct = _get_pct_diff(
        ext1,
        ext2,
        filterval,
        pv,
        nastring,
        mxmn_b=mxmn_b,
        ismax=ismax,
        flagbads=flagbads,
    )
    pct_ret = pct[pv]
    hsto = ytools.histogram(pct_ret, histogram_inc)
    # for trimming down if prtbad set:
    prtpv = _get_badpv(pct, pv, *prtbads, True)
    pctlen = max(len(pdhdr), len(max(spct, key=len)))
    sformatpd = f"{{:{pctlen}}}"
    # for writer.formheader:
    numlen = max(13, len(max(names, key=len)), len(numform.format(np.pi)))
    if not doabsmax:
        # one column per data set plus the % diff column
        print_info.headers1.extend([*names, ""])
        print_info.headers2.extend([valhdr, valhdr, pdhdr])
        print_info.formats.extend([numform, numform, sformatpd])
        print_info.printargs.extend([ext1, ext2, spct])
        print_info.widths.extend([numlen, numlen, pctlen])
        print_info.seps.extend([4, 2, 2])
        print_info.justs.extend(["c", "c", "c"])
    elif shortabsmax:
        # abs-max only: two abs-max columns plus % diff
        print_info.headers1.extend([*names, ""])
        print_info.headers2.extend([absmhdr, absmhdr, pdhdr])
        print_info.formats.extend([numform, numform, sformatpd])
        print_info.printargs.extend([ext1, ext2, spct])
        print_info.widths.extend([numlen, numlen, pctlen])
        print_info.seps.extend([4, 2, 2])
        print_info.justs.extend(["c", "c", "c"])
    else:
        # full abs-max layout: max1, min1, absmax1, absmax2, % diff
        print_info.headers1.extend([names[0], names[0], names[0], names[1], ""])
        print_info.headers2.extend([maxhdr, minhdr, absmhdr, absmhdr, pdhdr])
        print_info.formats.extend([numform, numform, numform, numform, sformatpd])
        print_info.printargs.extend([mxmn1[:, 0], mxmn1[:, 1], ext1, ext2, spct])
        print_info.widths.extend([numlen, numlen, numlen, numlen, pctlen])
        print_info.seps.extend([4, 2, 2, 2, 2])
        print_info.justs.extend(["c", "c", "c", "c", "c"])
    return dict(
        pct=pct_ret, spct=spct, hsto=hsto, prtpv=prtpv, mag=mag, magfilt=magfilt
    )
def _figure_on(name, doabsmax, show_figures):
    """Open (and clear) the figure for the comparison plots.

    Uses a full page for the three-subplot layout, or a third of a page
    when only abs-max is plotted. A named, reusable figure is created
    only when plots are shown interactively.
    """
    width, height = 8.5, 11.0
    if doabsmax:
        height /= 3.0
    if show_figures:
        # named figure so repeated runs update the same window
        plt.figure(name, figsize=[width, height])
        plt.clf()
    else:
        plt.figure(figsize=[width, height])
def _figure_off(show_figures):
    """Close the current figure unless it is being shown interactively."""
    if show_figures:
        return
    plt.close()
def _prep_subplot(pctinfo, sp):
    # Activate subplot `sp` (311/312/313). When max/min/abs-max are all
    # being plotted ("mx" present in `pctinfo`), rows 2 and 3 share the
    # x-axis of the current (first) subplot. NOTE(review): sharex relies
    # on subplot 311 having been created first -- confirm call order.
    if "mx" in pctinfo:
        # if not just doing absmax
        if sp > 311:
            plt.subplot(sp, sharex=plt.gca())
        else:
            plt.subplot(sp)
def _plot_magpct(
    pctinfo,
    names,
    desc,
    doabsmax,
    filename,
    magpct_options,
    use_range,
    maxhdr,
    minhdr,
    absmhdr,
    show_figures,
    tight_layout_args,
):
    """Plot % difference vs magnitude (via :func:`magpct`) for the max,
    min, and abs-max comparisons, one subplot each. Saves to
    "<filename>.magpct.png" when `filename` is a string.
    """
    ptitle = f"{desc} - {{}} Comparison vs Magnitude"
    xl = f"{names[1]} Magnitude"
    yl = f"% Diff of {names[0]} vs {names[1]}"
    _figure_on("Magpct - " + desc, doabsmax, show_figures)
    try:
        for lbl, hdr, sp, ismax in (
            ("mx", maxhdr, 311, True),
            ("mn", minhdr, 312, False),
            ("amx", absmhdr, 313, True),
        ):
            _prep_subplot(pctinfo, sp)
            if lbl in pctinfo:
                if use_range:
                    # range-based % diffs: reference is the abs-max data
                    ref = pctinfo["amx"]["mag"][1]
                else:
                    ref = None
                magpct(
                    pctinfo[lbl]["mag"][0],
                    pctinfo[lbl]["mag"][1],
                    Ref=ref,
                    ismax=ismax,
                    filterval=pctinfo[lbl]["magfilt"],
                    **magpct_options,
                )
                plt.title(ptitle.format(hdr))
                plt.xlabel(xl)
                plt.ylabel(yl)
                plt.grid(True)
        plt.tight_layout(**tight_layout_args)
        if isinstance(filename, str):
            plt.savefig(filename + ".magpct.png")
    finally:
        # always release the figure when not showing interactively
        _figure_off(show_figures)
def _plot_histogram(
    pctinfo,
    names,
    desc,
    doabsmax,
    filename,
    histogram_inc,
    maxhdr,
    minhdr,
    absmhdr,
    show_figures,
    tight_layout_args,
):
    """Plot % difference histograms for the max, min, and abs-max
    comparisons, one subplot each. Bars are color-coded by severity
    (blue <= 5%, magenta 5-10%, red > 10%). Saves to
    "<filename>.histogram.png" when `filename` is a string.
    """
    ptitle = f"{desc} - {{}} Comparison Histogram"
    xl = f"% Diff of {names[0]} vs {names[1]}"
    yl = "Percent Occurrence (%)"
    _figure_on("Histogram - " + desc, doabsmax, show_figures)
    try:
        for lbl, hdr, sp in (
            ("mx", maxhdr, 311),
            ("mn", minhdr, 312),
            ("amx", absmhdr, 313),
        ):
            _prep_subplot(pctinfo, sp)
            if lbl in pctinfo:
                width = histogram_inc
                # histogram columns: [bin center, count, percent]
                x = pctinfo[lbl]["hsto"][:, 0]
                y = pctinfo[lbl]["hsto"][:, 2]
                colors = ["b"] * len(x)
                ax = abs(x)
                # 5-10% bins magenta, >10% bins red
                pv1 = ((ax > 5) & (ax <= 10)).nonzero()[0]
                pv2 = (ax > 10).nonzero()[0]
                for pv, c in ((pv1, "m"), (pv2, "r")):
                    for i in pv:
                        colors[i] = c
                plt.bar(x, y, width=width, color=colors, align="center")
                plt.title(ptitle.format(hdr))
                plt.xlabel(xl)
                plt.ylabel(yl)
                # keep at least a +/-5% x-range for readability
                x = abs(max(plt.xlim(), key=abs))
                if x < 5:
                    plt.xlim(-5, 5)
                plt.grid(True)
        plt.tight_layout(**tight_layout_args)
        if isinstance(filename, str):
            plt.savefig(filename + ".histogram.png")
    finally:
        # always release the figure when not showing interactively
        _figure_off(show_figures)
def rptpct1(
mxmn1,
mxmn2,
filename,
*,
title="PERCENT DIFFERENCE REPORT",
names=("Self", "Reference"),
desc=None,
filterval=None,
labels=None,
units=None,
ignorepv=None,
uf_reds=None,
use_range=True,
numform=None,
prtbad=None,
prtbadh=None,
prtbadl=None,
flagbad=None,
flagbadh=None,
flagbadl=None,
dohistogram=True,
histogram_inc=1.0,
domagpct=True,
magpct_options=None,
doabsmax=False,
shortabsmax=False,
roundvals=-1,
rowhdr="Row",
deschdr="Description",
maxhdr="Maximum",
minhdr="Minimum",
absmhdr="Abs-Max",
perpage=-1,
tight_layout_args=None,
show_figures=False,
align_by_label=True,
):
"""
Write a percent difference report between 2 sets of max/min data
Parameters
----------
mxmn1 : 2d array_like or SimpleNamespace
The max/min data to compare to the `mxmn2` set. If 2-column
array_like, its columns are: [max, min]. If SimpleNamespace,
it must be as defined in :class:`DR_Results` and have these
members:
.. code-block:: none
.ext = [max, min]
.drminfo = SimpleNamespace which has (at least):
.desc = one line description of category
.filterval = the filter value; (see `filterval`
description below)
.labels = a list of descriptions; one per row
.ignorepv = these rows will get 'n/a' for % diff
.units = string with units
.uf_reds = uncertainty factors
Note that the inputs `desc`, `labels`, etc, override the
values above.
mxmn2 : 2d array_like or SimpleNamespace
The reference set of max/min data. Format is the same as
`mxmn1`.
.. note::
If both `mxmn1` and `mxmn2` are SimpleNamespaces and have
the ``.drminfo.labels`` attribute, this routine will, by
default, use the labels to align the data sets for
comparison. To prevent this, set the `align_by_label`
parameter to False.
filename : string or file_like or 1 or None
Either a name of a file, or is a file_like object as returned
by :func:`open` or :class:`io.StringIO`. Input as integer 1 to
write to stdout. Can also be the name of a directory or None;
in these cases, a GUI is opened for file selection.
title : string; must be named; optional
Title for the report
names : list/tuple; must be named; optional
Two (short) strings identifying the two sets of data
desc : string or None; must be named; optional
A one line description of the table. Overrides
`mxmn1.drminfo.desc`. If neither are input,
'No description provided' is used.
filterval : scalar, 1d array_like or None; must be named; optional
Numbers with absolute value <= than `filterval` will get a
'n/a' % diff. If vector, length must match number of rows in
`mxmn1` and `mxmn2` data. Overrides `mxmn1.drminfo.filterval`.
If neither are input, `filterval` is set to 1.e-6.
labels : list or None; must be named; optional
A list of strings briefly describing each row. Overrides
`mxmn1.drminfo.labels`. If neither are input,
``['Row 1','Row 2',...]`` is used.
units : string or None; must be named; optional
Specifies the units. Overrides `mxmn1.drminfo.units`. If
neither are input, 'Not specified' is used.
ignorepv : 1d array or None; must be named; optional
0-offset index vector specifying which rows of `mxmn1` to
ignore (they get the 'n/a' % diff). Overrides
`mxmn1.drminfo.ignorepv`. If neither are input, no rows are
ignored (though `filterval` is still used).
.. note::
`ignorepv` applies *before* any alignment by labels is
done (when `align_by_label` is True, which is the
default).
uf_reds : 1d array or None; must be named; optional
Uncertainty factors: [rigid, elastic, dynamic, static].
Overrides `mxmn1.drminfo.uf_reds`. If neither is input,
'Not specified' is used.
use_range : bool; must be named, optional
If True, the denominator of the % diff calc for both the max
& min for each row is the absolute maximum of the reference
max & min for that row. If False, the denominator is the
applicable reference max or min. A quick example shows why
``use_range=True`` might be useful:
.. code-block:: none
If [max1, min1] = [12345, -10] and
[max2, min2] = [12300, 50]
Then:
% diff = [0.37%, 0.49%] if use_range is True
% diff = [0.37%, 120.00%] if use_range is False
Note that the sign of the % diff is defined such that a
positive % diff means an exceedance: where ``max1 > max2`` or
``min1 < min2``.
`use_range` is ignored if `doabsmax` is True.
numform : string or None; must be named; optional
Format of the max & min numbers. If None, it is set internally
to be 13 chars wide and depends on the range of numbers to
print:
- if range is "small", numform='{:13.xf}' where "x" ranges
from 0 to 7
- if range is "large", numform='{:13.6e}'
prtbad : scalar or None; must be named; optional
Only print rows where ``abs(%diff) > prtbad``. For example, to
print rows off by more than 5%, use ``prtbad=5``. `prtbad`
takes precedence over `prtbadh` and `prtbadl`.
prtbadh : scalar or None; must be named; optional
Only print rows where ``%diff > prtbadh``. Handy for showing
just the exceedances. `prtbadh` takes precedence over
`prtbadl`.
prtbadl : scalar or None; must be named; optional
Only print rows where ``%diff < prtbadl``. Handy for showing
where reference rows are higher.
flagbad : scalar or None; must be named; optional
Flag % diffs where ``abs(%diff) > flagbad``. Works similar to
`prtbad`. The flag is an asterisk (*).
flagbadh : scalar or None; must be named; optional
Flag % diffs where ``%diff > flagbadh``. Works similar to
`prtbadh`. Handy for flagging exceedances. `flagbadh` takes
precedence over `flagbadl`.
flagbadl : scalar or None; must be named; optional
Flag % diffs where ``%diff < flagbadl``. Works similar to
`prtbadl`.
dohistogram : bool; must be named; optional
If True, plot the histograms. Plots will be written to
"`filename`.histogram.png".
histogram_inc : scalar; must be named; optional
The histogram increment; defaults to 1.0 (for 1%).
domagpct : bool; must be named; optional
If True, plot the percent differences versus magnitude via
:func:`magpct`. Plots will be written to
"`filename`.magpct.png". Filtering for the "magpct" plot is
controlled by the ``magpct_options['filterval']`` and
``magpct_options['symlogy']`` options. By default, all percent
differences are shown, but the larger values (according to the
`filterval` filter) are emphasized by using a mixed linear/log
y-axis. The percent differences for the `ignorepv` rows are
not plotted.
magpct_options : None or dict; must be named; optional
If None, it is internally reset to::
magpct_options = {'filterval': 'filterval'}
Use this parameter to provide any options to :func:`magpct`
but note that the `filterval` option for :func:`magpct` is
treated specially. Here, in addition to any of the values that
:func:`magpct` accepts, it can also be set to the string
"filterval" as in the default case shown above. In that case,
``magpct_options['filterval']`` gets internally reset to the
initial value of `filterval` (which is None by default).
.. note::
The call to :func:`magpct` is *after* applying `ignorepv`
and doing any data aligning by labels.
.. note::
The two filter value options (`filterval` and
``magpct_options['filterval']``) have different defaults:
None and 'filterval`, respectively. They also differ on how
the ``None`` setting is used: for `filterval`, None is
replaced by 1.e-6 while for `magpct_filterval`, None means
that the "magpct" plot will not have any filters applied at
all.
.. note::
The above means that, if you accept the default values for
`filterval` and for ``magpct_options['filterval']``, then
tables and the histogram plots will use a `filterval` of
1.e-6 while the "magpct" plots will use no filter (it
compares everything except perfect zeros).
doabsmax : bool; must be named; optional
If True, compare only absolute maximums.
shortabsmax : bool; must be named; optional
If True, set ``doabsmax=True`` and do not print the max1 and
min1 columns.
roundvals : integer; must be named; optional
Round max & min numbers at specified decimal. If negative, no
rounding.
rowhdr : string; must be named; optional
Header for row number column
deschdr : string; must be named; optional
Header for description column
maxhdr : string; must be named; optional
Header for the column 1 data
minhdr : string; must be named; optional
Header for the column 2 data
absmhdr : string; must be named; optional
Header for abs-max column
perpage : integer; must be named; optional
The number of lines to write perpage. If < 1, there is no
limit (one page).
tight_layout_args : dict or None; must be named; optional
Arguments for :func:`matplotlib.pyplot.tight_layout`. If None,
defaults to ``{'pad': 3.0}``.
show_figures : bool; must be named; optional
If True, plot figures will be displayed on the screen for
interactive viewing. Warning: there may be many figures.
align_by_label : bool; must be named; optional
If True, use labels to align the two sets of data for
comparison. See note above under the `mxmn2` option.
Returns
-------
pdiff_info : dict
Dictionary with 'amx' (abs-max), 'mx' (max), and 'mn' keys:
.. code-block:: none
<class 'dict'>[n=3]
'amx': <class 'dict'>[n=5]
'hsto' : float64 ndarray 33 elems: (11, 3)
'mag' : [n=2]: (float64 ndarray: (100,), ...
'pct' : float64 ndarray 100 elems: (100,)
'prtpv': bool ndarray 100 elems: (100,)
'spct' : [n=100]: [' -2.46', ' -1.50', ...
'mn' : <class 'dict'>[n=5]
'hsto' : float64 ndarray 33 elems: (11, 3)
'mag' : [n=2]: (float64 ndarray: (100,), ...
'pct' : float64 ndarray 100 elems: (100,)
'prtpv': bool ndarray 100 elems: (100,)
'spct' : [n=100]: [' 1.55', ' 1.53', ...
'mx' : <class 'dict'>[n=5]
'hsto' : float64 ndarray 27 elems: (9, 3)
'mag' : [n=2]: (float64 ndarray: (100,), ...
'pct' : float64 ndarray 100 elems: (100,)
'prtpv': bool ndarray 100 elems: (100,)
'spct' : [n=100]: [' -2.46', ' -1.50', ...
Where:
.. code-block:: none
'hsto' : output of :func:`histogram`: [center, count, %]
'mag' : inputs to :func:`magpct`
'pct' : percent differences
'prtpv' : rows to print partition vector
'spct' : string version of 'pct'
Examples
--------
>>> import numpy as np
>>> from pyyeti import cla
>>> ext1 = [[120.0, -8.0],
... [8.0, -120.0]]
>>> ext2 = [[115.0, -5.0],
... [10.0, -125.0]]
Run :func:`rptpct1` multiple times to get a more complete picture
of all the output (the table is very wide). Also, the plots will
be turned off for this example.
First, the header:
>>> opts = {'domagpct': False, 'dohistogram': False}
>>> dct = cla.rptpct1(ext1, ext2, 1, **opts) # doctest: +ELLIPSIS
PERCENT DIFFERENCE REPORT
<BLANKLINE>
Description: No description provided
Uncertainty: Not specified
Units: Not specified
Filter: 1e-06
Notes: % Diff = +/- abs(Self-Reference)/max(abs(Reference...
Sign set such that positive % differences indicate...
Date: ...
...
Then, the max/min/absmax percent difference table in 3 calls:
>>> dct = cla.rptpct1(ext1, ext2, 1, **opts) # doctest: +ELLIPSIS
PERCENT DIFFERENCE REPORT
...
Self Reference ...
Row Description Maximum Maximum % Diff ...
------- ----------- ------------- ------------- ------- ...
1 Row 1 120.00000 115.00000 4.35 ...
2 Row 2 8.00000 10.00000 -1.60 ...
...
>>> dct = cla.rptpct1(ext1, ext2, 1, **opts) # doctest: +ELLIPSIS
PERCENT DIFFERENCE REPORT
...
... Self Reference ...
Row Description ... Minimum Minimum % Diff ...
------- ----------- ...------------- ------------- ------- ...
1 Row 1 ... -8.00000 -5.00000 2.61 ...
2 Row 2 ... -120.00000 -125.00000 -4.00 ...
...
>>> dct = cla.rptpct1(ext1, ext2, 1, **opts) # doctest: +ELLIPSIS
PERCENT DIFFERENCE REPORT
...
... Self Reference
Row Description ... Abs-Max Abs-Max % Diff
------- ----------- ...------------- ------------- -------
1 Row 1 ... 120.00000 115.00000 4.35
2 Row 2 ... 120.00000 125.00000 -4.00
...
Finally, the histogram summaries:
>>> dct = cla.rptpct1(ext1, ext2, 1, **opts) # doctest: +ELLIPSIS
PERCENT DIFFERENCE REPORT
...
No description provided - Maximum Comparison Histogram
<BLANKLINE>
% Diff Count Percent
-------- -------- -------
-2.00 1 50.00
4.00 1 50.00
<BLANKLINE>
0.0% of values are within 1%
50.0% of values are within 2%
100.0% of values are within 5%
<BLANKLINE>
% Diff Statistics: [Min, Max, Mean, StdDev] = [-1.60, 4.35,...
<BLANKLINE>
<BLANKLINE>
No description provided - Minimum Comparison Histogram
<BLANKLINE>
% Diff Count Percent
-------- -------- -------
-4.00 1 50.00
3.00 1 50.00
<BLANKLINE>
0.0% of values are within 1%
100.0% of values are within 5%
<BLANKLINE>
% Diff Statistics: [Min, Max, Mean, StdDev] = [-4.00, 2.61,...
<BLANKLINE>
<BLANKLINE>
No description provided - Abs-Max Comparison Histogram
<BLANKLINE>
% Diff Count Percent
-------- -------- -------
-4.00 1 50.00
4.00 1 50.00
<BLANKLINE>
0.0% of values are within 1%
100.0% of values are within 5%
<BLANKLINE>
% Diff Statistics: [Min, Max, Mean, StdDev] = [-4.00, 4.35,...
"""
if tight_layout_args is None:
tight_layout_args = {"pad": 3.0}
if magpct_options is None:
magpct_options = {"filterval": "filterval"}
else:
magpct_options = magpct_options.copy()
# magpct_options['filterval'] get special treatment:
magpct_filterval = magpct_options["filterval"]
del magpct_options["filterval"]
if isinstance(magpct_filterval, str):
if magpct_filterval != "filterval":
raise ValueError(
"``magpct_options['filterval']`` is an invalid "
f"string: {magpct_filterval!r} (can only "
"be 'filterval' if a string)"
)
# copy the initial `filterval` setting:
magpct_filterval = filterval
infovars = (
"desc",
"filterval",
"magpct_filterval",
"labels",
"units",
"ignorepv",
"uf_reds",
)
dct = locals()
infodct = {n: dct[n] for n in infovars}
del dct
# check mxmn1:
if isinstance(mxmn1, SimpleNamespace):
sns = mxmn1.drminfo
for key, value in infodct.items():
if value is None:
infodct[key] = getattr(sns, key, None)
del sns
mxmn1 = mxmn1.ext
else:
mxmn1 = np.atleast_2d(mxmn1)
row_number = np.arange(1, mxmn1.shape[0] + 1)
# check mxmn2:
if isinstance(mxmn2, SimpleNamespace) and getattr(mxmn2, "drminfo", None):
labels2 = mxmn2.drminfo.labels
mxmn2 = mxmn2.ext
if align_by_label:
# use labels and labels2 to align data; this is in case
# the two sets of results recover some of the same items,
# but not all
mxmn1, mxmn2, row_number = _align_mxmn(
mxmn1, mxmn2, labels2, row_number, infodct
)
else:
mxmn2 = np.atleast_2d(mxmn2)
desc = infodct["desc"]
if desc is None:
desc = "No description provided"
R = mxmn1.shape[0]
if R != mxmn2.shape[0]:
raise ValueError(
f"`mxmn1` and `mxmn2` have a different number of rows: "
f"{R} vs {mxmn2.shape[0]} for category with `desc` = {desc}"
)
filterval = infodct["filterval"]
magpct_filterval = infodct["magpct_filterval"]
labels = infodct["labels"]
units = infodct["units"]
ignorepv = infodct["ignorepv"]
uf_reds = infodct["uf_reds"]
del infodct
if filterval is None:
filterval = 1.0e-6
filterval = _proc_filterval(filterval, R, "filterval")
magpct_filterval = _proc_filterval(
magpct_filterval, R, "magpct_options['filterval']"
)
if labels is None:
labels = [f"Row {i + 1:6d}" for i in range(R)]
elif len(labels) != R:
raise ValueError(
"length of `labels` does not match number"
f" of rows in `mxmn1`: {len(labels)} vs {R} for "
f"category with `desc` = {desc}"
)
if units is None:
units = "Not specified"
if numform is None:
numform = _get_numform(mxmn1)
pdhdr = "% Diff"
nastring = "n/a "
comppv = np.ones(R, bool)
if ignorepv is not None:
comppv[ignorepv] = False
# for row labels:
w = max(11, len(max(labels, key=len)))
frm = f"{{:{w}}}"
# start preparing for writer.formheader:
print_info = SimpleNamespace(
headers1=["", ""],
headers2=[rowhdr, deschdr],
formats=["{:7d}", frm],
printargs=[row_number, labels],
widths=[7, w],
seps=[0, 2],
justs=["c", "l"],
)
if shortabsmax:
doabsmax = True
if doabsmax:
use_range = False
if roundvals > -1:
mxmn1 = np.round(mxmn1, roundvals)
mxmn2 = np.round(mxmn2, roundvals)
prtbads = (prtbad, prtbadh, prtbadl)
flagbads = (flagbad, flagbadh, flagbadl)
# compute percent differences
pctinfo = {}
kwargs = dict(
names=names,
mxmn1=mxmn1,
comppv=comppv,
histogram_inc=histogram_inc,
numform=numform,
prtbads=prtbads,
flagbads=flagbads,
maxhdr=maxhdr,
minhdr=minhdr,
absmhdr=absmhdr,
pdhdr=pdhdr,
nastring=nastring,
doabsmax=doabsmax,
shortabsmax=shortabsmax,
print_info=print_info,
)
with warnings.catch_warnings():
warnings.filterwarnings("ignore", r"All-NaN (slice|axis) encountered")
mx1 = np.nanmax(abs(mxmn1), axis=1)
mx2 = np.nanmax(abs(mxmn2), axis=1)
if not doabsmax:
max1, min1 = mxmn1[:, 0], mxmn1[:, 1]
max2, min2 = mxmn2[:, 0], mxmn2[:, 1]
mxmn_b = mxmn2 if use_range else None
prtpv = np.zeros(R, bool)
for i in zip(
("mx", "mn", "amx"),
(max1, min1, mx1),
(max2, min2, mx2),
(True, False, True),
(maxhdr, minhdr, absmhdr),
):
lbl, ext1, ext2, ismax, valhdr = i
pctinfo[lbl] = _proc_pct(
ext1,
ext2,
filterval,
magpct_filterval,
mxmn_b=mxmn_b,
ismax=ismax,
valhdr=valhdr,
**kwargs,
)
prtpv |= pctinfo[lbl]["prtpv"]
prtpv &= comppv
else:
pctinfo["amx"] = _proc_pct(
mx1,
mx2,
filterval,
magpct_filterval,
mxmn_b=None,
ismax=True,
valhdr=absmhdr,
**kwargs,
)
prtpv = pctinfo["amx"]["prtpv"]
hu, frm = writer.formheader(
[print_info.headers1, print_info.headers2],
print_info.widths,
print_info.formats,
sep=print_info.seps,
just=print_info.justs,
)
# format page header:
misc = _get_filtline(filterval) + _get_noteline(use_range, names, prtbads, flagbads)
hdrs = _get_rpt_headers(desc=desc, uf_reds=uf_reds, units=units, misc=misc)
header = title + "\n\n" + hdrs + "\n"
imode = plt.isinteractive()
plt.interactive(show_figures)
try:
if domagpct:
_plot_magpct(
pctinfo,
names,
desc,
doabsmax,
filename,
magpct_options,
use_range,
maxhdr,
minhdr,
absmhdr,
show_figures,
tight_layout_args,
)
if dohistogram:
_plot_histogram(
pctinfo,
names,
desc,
doabsmax,
filename,
histogram_inc,
maxhdr,
minhdr,
absmhdr,
show_figures,
tight_layout_args,
)
finally:
plt.interactive(imode)
# write results
    @ytools.write_text_file
    def _wtcmp(f, header, hu, frm, printargs, perpage, prtpv, pctinfo, desc):
        """Write the comparison table (paginated) to stream *f*, followed by
        the histogram summary text for each comparison present in *pctinfo*.

        ``maxhdr``, ``minhdr``, ``absmhdr``, ``writer`` and
        ``_get_histogram_str`` are taken from the enclosing scope.
        """
        # indices of the rows selected for printing
        prtpv = prtpv.nonzero()[0]
        if perpage < 1:
            # one additional in case size is zero
            perpage = prtpv.size + 1
        pages = (prtpv.size + perpage - 1) // perpage
        if prtpv.size < len(printargs[0]):
            # some rows were filtered out: trim every printed column down
            # to just the selected rows
            for i, item in enumerate(printargs):
                printargs[i] = [item[j] for j in prtpv]
        tabhead = header + hu
        pager = "\n" # + chr(12)
        for p in range(pages):
            if p > 0:
                f.write(pager)
            # repeat the full header block at the top of every page
            f.write(tabhead)
            b = p * perpage
            e = b + perpage
            writer.vecwrite(f, frm, *printargs, so=slice(b, e))
        f.write(pager)
        # append histogram summaries for max, min, abs-max (whichever
        # comparisons were computed and stored in pctinfo)
        for lbl, hdr in zip(("mx", "mn", "amx"), (maxhdr, minhdr, absmhdr)):
            if lbl in pctinfo:
                f.write(_get_histogram_str(desc, hdr, pctinfo[lbl]))
_wtcmp(
filename, header, hu, frm, print_info.printargs, perpage, prtpv, pctinfo, desc
)
return pctinfo
| 34.19606 | 88 | 0.549173 | 0 | 0 | 0 | 0 | 946 | 0.025951 | 0 | 0 | 19,228 | 0.527474 |
47154e4f13dd1dcdda9cb5781744c6421ce714c9 | 1,759 | py | Python | python/run_selective_universal.py | rwightman/pytorch-nips2017-adversarial | 1727494ea3bfcbc3b4754b35096e816e1269ff38 | [
"Apache-2.0"
] | 17 | 2018-02-05T15:09:01.000Z | 2022-03-15T06:27:07.000Z | python/run_selective_universal.py | rwightman/pytorch-nips2017-adversarial | 1727494ea3bfcbc3b4754b35096e816e1269ff38 | [
"Apache-2.0"
] | 1 | 2019-03-03T05:30:38.000Z | 2019-03-08T04:44:39.000Z | python/run_selective_universal.py | rwightman/pytorch-nips2017-adversarial | 1727494ea3bfcbc3b4754b35096e816e1269ff38 | [
"Apache-2.0"
] | 2 | 2019-07-26T07:17:09.000Z | 2019-10-16T03:44:02.000Z | import argparse
from attacks.image_save_runner import ImageSaveAttackRunner
from attacks.selective_universal import SelectiveUniversal
from dataset import Dataset
from models import create_ensemble
from models.model_configs import config_from_string
parser = argparse.ArgumentParser(description='Defence')
parser.add_argument('--input_dir', metavar='DIR',
help='Input directory with images.')
parser.add_argument('--output_dir', metavar='FILE',
help='Output directory to save images.')
parser.add_argument('--max_epsilon', type=int, default=16, metavar='N',
help='Maximum size of adversarial perturbation. (default: 16.0)')
parser.add_argument('--npy_files', nargs='+', type=str)
parser.add_argument('--ensemble', nargs='+', help='Class names for the defensive ensemble.')
parser.add_argument('--ensemble_weights', nargs='+', type=float,
help='Weights for weighted geometric mean of output probs')
parser.add_argument('--checkpoint_paths', nargs='+', help='Paths to checkpoint files for each model.')
parser.add_argument('--try_mirrors', action='store_true', default=False)
def main():
    """Entry point: build the defensive ensemble, then run the
    SelectiveUniversal attack over the input directory, saving the
    resulting adversarial images to the output directory.

    Indentation restored — the function body was flattened to column 0
    in the source, which is a SyntaxError in Python.
    """
    args = parser.parse_args()

    dataset = Dataset(args.input_dir, target_file='')

    # Build the (weighted) ensemble of target models on the GPU.
    cfgs = [config_from_string(s) for s in args.ensemble]
    target_model = create_ensemble(cfgs, args.ensemble_weights, args.checkpoint_paths).cuda()
    target_model.eval()

    attack = SelectiveUniversal(
        target_model,
        args.npy_files,
        max_epsilon=args.max_epsilon,
        try_mirrors=args.try_mirrors,
    )

    runner = ImageSaveAttackRunner(dataset, args.output_dir)
    # SelectiveUniversal only supports batch size of 1.
    runner.run(attack, 1)
if __name__ == '__main__':
    # Indentation restored: the call below was flattened to column 0,
    # which made this guard a SyntaxError.
    main()
| 38.23913 | 102 | 0.712905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 472 | 0.268334 |
4715857bcca4a207ecfd692fa8d2496336300ae1 | 449,136 | py | Python | py-fedex/pyfedex/location_service_v11.py | QwadwoNyamekye/purplship-carriers | ce34e3054de246e3d85ddf6928b607193d061ae2 | [
"MIT"
] | 2 | 2021-04-12T22:40:28.000Z | 2021-04-21T18:28:31.000Z | py-fedex/pyfedex/location_service_v11.py | QwadwoNyamekye/purplship-carriers | ce34e3054de246e3d85ddf6928b607193d061ae2 | [
"MIT"
] | 2 | 2021-01-29T07:14:31.000Z | 2021-02-18T18:29:23.000Z | py-fedex/pyfedex/location_service_v11.py | QwadwoNyamekye/purplship-carriers | ce34e3054de246e3d85ddf6928b607193d061ae2 | [
"MIT"
] | 3 | 2020-09-09T17:04:46.000Z | 2021-03-05T00:32:32.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Fri Mar 6 15:54:36 2020 by generateDS.py version 2.35.15.
# Python 3.8.1 (v3.8.1:1b293b6006, Dec 18 2019, 14:08:53) [Clang 6.0 (clang-600.0.57)]
#
# Command line options:
# ('--no-namespace-defs', '')
# ('-o', './python/location_service_v11.py')
#
# Command line arguments:
# ./schemas/LocationService_v11.xsd
#
# Command line:
# /Users/danielkobina/Documents/Open/.sandbox/bin/generateDS --no-namespace-defs -o "./python/location_service_v11.py" ./schemas/LocationService_v11.xsd
#
# Current working directory (os.getcwd()):
# 2020-02
#
from six.moves import zip_longest
import os
import sys
import re as re_
import base64
import datetime as datetime_
import decimal as decimal_
# Prefer lxml's etree (richer API, e.g. ``sourceline``/``nsmap`` used later
# in this module); fall back to the standard-library ElementTree otherwise.
try:
    from lxml import etree as etree_
except ImportError:
    from xml.etree import ElementTree as etree_
# Module-level switches consumed by the generated classes later in the file
# (their exact use is not visible in this chunk).
Validate_simpletypes_ = True
SaveElementTreeNode = True
# Python 2/3 compatible "string" base type for isinstance checks below.
if sys.version_info.major == 2:
    BaseStrType_ = basestring
else:
    BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* into an ElementTree document.

    When no parser is supplied, prefer the lxml ElementTree-compatible
    parser (it ignores comments) and fall back to the plain xml.etree
    parser when lxml is not available.
    """
    if parser is None:
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # plain xml.etree has no ETCompatXMLParser
            parser = etree_.XMLParser()
    # Accept path-like objects where the platform supports them.
    try:
        if isinstance(infile, os.PathLike):
            infile = os.path.join(infile)
    except AttributeError:
        pass
    return etree_.parse(infile, parser=parser, **kwargs)
def parsexmlstring_(instring, parser=None, **kwargs):
    """Parse an XML string and return the root element.

    A default parser is created on demand: the lxml ElementTree-compatible
    parser when available (it ignores comments), otherwise xml.etree's.
    """
    if parser is None:
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # plain xml.etree has no ETCompatXMLParser
            parser = etree_.XMLParser()
    return etree_.fromstring(instring, parser=parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for an example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
# Additionally, the generatedsnamespaces module can contain a python
# dictionary named GenerateDSNamespaceTypePrefixes that associates element
# types with the namespace prefixes that are to be added to the
# "xsi:type" attribute value. See the exportAttributes method of
# any generated element type and the generation of "xsi:type" for an
# example of the use of this table.
# An example table:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceTypePrefixes = {
# "ElementtypeC": "aaa:",
# "ElementtypeD": "bbb:",
# }
#
# Optional user-supplied namespace tables (see the explanation above);
# default to empty dicts when no generatedsnamespaces module is provided.
try:
    from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
    GenerateDSNamespaceDefs_ = {}
try:
    from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_
except ImportError:
    GenerateDSNamespaceTypePrefixes_ = {}
#
# You can replace the following class definition by defining an
# importable module named "generatedscollector" containing a class
# named "GdsCollector". See the default class definition below for
# clues about the possible content of that class.
#
# Use a project-provided collector when one exists; otherwise define the
# default one below.
try:
    from generatedscollector import GdsCollector as GdsCollector_
except ImportError:
    class GdsCollector_(object):
        """Accumulates warning/validation messages as a list of strings."""

        def __init__(self, messages=None):
            # Share the caller's list when one is given; start fresh otherwise.
            self.messages = [] if messages is None else messages

        def add_message(self, msg):
            self.messages.append(msg)

        def get_messages(self):
            return self.messages

        def clear_messages(self):
            self.messages = []

        def print_messages(self):
            for msg in self.messages:
                print("Warning: {}".format(msg))

        def write_messages(self, outstream):
            for msg in self.messages:
                outstream.write("Warning: {}\n".format(msg))
#
# The super-class for enum types
#
try:
    from enum import Enum
except ImportError:
    # enum module unavailable: use a plain base class so the enum-style
    # class definitions below still work.
    Enum = object
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
    from generatedssuper import GeneratedsSuper
except ImportError as exp:
    # No project-supplied override found: fall back to the default
    # generateDS.py runtime support class defined below.
    class GeneratedsSuper(object):
        """Runtime support base class for the generated element classes.

        Supplies the ``gds_*`` format/parse/validate helpers for XML simple
        types (strings, numbers, booleans, dates/times, space-separated
        lists) plus utilities for tree navigation, encoding and equality.
        """
        __hash__ = object.__hash__
        tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
        # Fixed-offset tzinfo built while parsing timezone suffixes.
        class _FixedOffsetTZ(datetime_.tzinfo):
            def __init__(self, offset, name):
                self.__offset = datetime_.timedelta(minutes=offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                return None
        # --- string / base64 helpers ---
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_parse_string(self, input_data, node=None, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node=None, input_name=''):
            if not input_data:
                return ''
            else:
                return input_data
        def gds_format_base64(self, input_data, input_name=''):
            return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node=None, input_name=''):
            return input_data
        # --- numeric helpers (integer, float, decimal, double and lists) ---
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_parse_integer(self, input_data, node=None, input_name=''):
            try:
                ival = int(input_data)
            except (TypeError, ValueError) as exp:
                raise_parse_error(node, 'Requires integer value: %s' % exp)
            return ival
        def gds_validate_integer(self, input_data, node=None, input_name=''):
            try:
                value = int(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires integer value')
            return value
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_integer_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    int(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of integer valuess')
            return values
        def gds_format_float(self, input_data, input_name=''):
            return ('%.15f' % input_data).rstrip('0')
        def gds_parse_float(self, input_data, node=None, input_name=''):
            try:
                fval_ = float(input_data)
            except (TypeError, ValueError) as exp:
                raise_parse_error(node, 'Requires float or double value: %s' % exp)
            return fval_
        def gds_validate_float(self, input_data, node=None, input_name=''):
            try:
                value = float(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires float value')
            return value
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_float_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of float values')
            return values
        def gds_format_decimal(self, input_data, input_name=''):
            return ('%s' % input_data).rstrip('0')
        def gds_parse_decimal(self, input_data, node=None, input_name=''):
            try:
                decimal_value = decimal_.Decimal(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires decimal value')
            return decimal_value
        def gds_validate_decimal(self, input_data, node=None, input_name=''):
            try:
                value = decimal_.Decimal(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires decimal value')
            return value
        def gds_format_decimal_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_decimal_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    decimal_.Decimal(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of decimal values')
            return values
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_parse_double(self, input_data, node=None, input_name=''):
            try:
                fval_ = float(input_data)
            except (TypeError, ValueError) as exp:
                raise_parse_error(node, 'Requires double or float value: %s' % exp)
            return fval_
        def gds_validate_double(self, input_data, node=None, input_name=''):
            try:
                value = float(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires double or float value')
            return value
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_double_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(
                        node, 'Requires sequence of double or float values')
            return values
        # --- boolean helpers ---
        def gds_format_boolean(self, input_data, input_name=''):
            return ('%s' % input_data).lower()
        def gds_parse_boolean(self, input_data, node=None, input_name=''):
            if input_data in ('true', '1'):
                bval = True
            elif input_data in ('false', '0'):
                bval = False
            else:
                raise_parse_error(node, 'Requires boolean value')
            return bval
        def gds_validate_boolean(self, input_data, node=None, input_name=''):
            if input_data not in (True, 1, False, 0, ):
                raise_parse_error(
                    node,
                    'Requires boolean value '
                    '(one of True, 1, False, 0)')
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_boolean_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                if value not in (True, 1, False, 0, ):
                    raise_parse_error(
                        node,
                        'Requires sequence of boolean values '
                        '(one of True, 1, False, 0)')
            return values
        # --- date/time helpers (ISO 8601 with optional timezone suffix) ---
        def gds_validate_datetime(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_datetime(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        @classmethod
        def gds_parse_datetime(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            time_parts = input_data.split('.')
            if len(time_parts) > 1:
                micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
                input_data = '%s.%s' % (
                    time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), )
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt
        def gds_validate_date(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_date(self, input_data, input_name=''):
            _svalue = '%04d-%02d-%02d' % (
                input_data.year,
                input_data.month,
                input_data.day,
            )
            try:
                if input_data.tzinfo is not None:
                    tzoff = input_data.tzinfo.utcoffset(input_data)
                    if tzoff is not None:
                        total_seconds = tzoff.seconds + (86400 * tzoff.days)
                        if total_seconds == 0:
                            _svalue += 'Z'
                        else:
                            if total_seconds < 0:
                                _svalue += '-'
                                total_seconds *= -1
                            else:
                                _svalue += '+'
                            hours = total_seconds // 3600
                            minutes = (total_seconds - (hours * 3600)) // 60
                            _svalue += '{0:02d}:{1:02d}'.format(
                                hours, minutes)
            except AttributeError:
                pass
            return _svalue
        @classmethod
        def gds_parse_date(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
            dt = dt.replace(tzinfo=tz)
            return dt.date()
        def gds_validate_time(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_time(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%02d:%02d:%02d' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%02d:%02d:%02d.%s' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_validate_simple_patterns(self, patterns, target):
            # pat is a list of lists of strings/patterns.
            # The target value must match at least one of the patterns
            # in order for the test to succeed.
            found1 = True
            for patterns1 in patterns:
                found2 = False
                for patterns2 in patterns1:
                    mo = re_.search(patterns2, target)
                    if mo is not None and len(mo.group(0)) == len(target):
                        found2 = True
                        break
                if not found2:
                    found1 = False
                    break
            return found1
        @classmethod
        def gds_parse_time(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            if len(input_data.split('.')) > 1:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt.time()
        # --- validation hooks used by the generated build methods ---
        def gds_check_cardinality_(
                self, value, input_name,
                min_occurs=0, max_occurs=1, required=None):
            if value is None:
                length = 0
            elif isinstance(value, list):
                length = len(value)
            else:
                length = 1
            if required is not None :
                if required and length < 1:
                    self.gds_collector_.add_message(
                        "Required value {}{} is missing".format(
                            input_name, self.gds_get_node_lineno_()))
            if length < min_occurs:
                self.gds_collector_.add_message(
                    "Number of values for {}{} is below "
                    "the minimum allowed, "
                    "expected at least {}, found {}".format(
                        input_name, self.gds_get_node_lineno_(),
                        min_occurs, length))
            elif length > max_occurs:
                self.gds_collector_.add_message(
                    "Number of values for {}{} is above "
                    "the maximum allowed, "
                    "expected at most {}, found {}".format(
                        input_name, self.gds_get_node_lineno_(),
                        max_occurs, length))
        def gds_validate_builtin_ST_(
                self, validator, value, input_name,
                min_occurs=None, max_occurs=None, required=None):
            if value is not None:
                try:
                    validator(value, input_name=input_name)
                except GDSParseError as parse_error:
                    self.gds_collector_.add_message(str(parse_error))
        def gds_validate_defined_ST_(
                self, validator, value, input_name,
                min_occurs=None, max_occurs=None, required=None):
            if value is not None:
                try:
                    validator(value)
                except GDSParseError as parse_error:
                    self.gds_collector_.add_message(str(parse_error))
        # --- misc utilities ---
        def gds_str_lower(self, instring):
            return instring.lower()
        def get_path_(self, node):
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            # provide default value in case option --disable-xml is used.
            content = ""
            content = etree_.tostring(node, encoding="unicode")
            return content
        @classmethod
        def gds_reverse_node_mapping(cls, mapping):
            return dict(((v, k) for k, v in mapping.items()))
        @staticmethod
        def gds_encode(instring):
            if sys.version_info.major == 2:
                if ExternalEncoding:
                    encoding = ExternalEncoding
                else:
                    encoding = 'utf-8'
                return instring.encode(encoding)
            else:
                return instring
        @staticmethod
        def convert_unicode(instring):
            if isinstance(instring, str):
                result = quote_xml(instring)
            elif sys.version_info.major == 2 and isinstance(instring, unicode):
                result = quote_xml(instring).encode('utf8')
            else:
                result = GeneratedsSuper.gds_encode(str(instring))
            return result
        def __eq__(self, other):
            def excl_select_objs_(obj):
                return (obj[0] != 'parent_object_' and
                        obj[0] != 'gds_collector_')
            if type(self) != type(other):
                return False
            return all(x == y for x, y in zip_longest(
                filter(excl_select_objs_, self.__dict__.items()),
                filter(excl_select_objs_, other.__dict__.items())))
        def __ne__(self, other):
            return not self.__eq__(other)
        # Django ETL transform hooks.
        def gds_djo_etl_transform(self):
            pass
        def gds_djo_etl_transform_db_obj(self, dbobj):
            pass
        # SQLAlchemy ETL transform hooks.
        def gds_sqa_etl_transform(self):
            return 0, None
        def gds_sqa_etl_transform_db_obj(self, dbobj):
            pass
        def gds_get_node_lineno_(self):
            if (hasattr(self, "gds_elementtree_node_") and
                    self.gds_elementtree_node_ is not None):
                return ' near line {}'.format(
                    self.gds_elementtree_node_.sourceline)
            else:
                return ""
def getSubclassFromModule_(module, class_):
    """Return *module*'s subclass named ``<ClassName>Sub`` for *class_*,
    or None when the module defines no such attribute."""
    return getattr(module, class_.__name__ + 'Sub', None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Encoding applied by gds_encode on Python 2 (empty means utf-8).
ExternalEncoding = ''
# Set this to false in order to deactivate during export, the use of
# name space prefixes captured from the input document.
UseCapturedNS_ = True
CapturedNsmap_ = {}
# Splits a Clark-notation tag "{ns}local" into its namespace and local parts.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
# Matches CDATA sections so quote_xml can leave them unescaped.
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write *level* levels of four-space indentation to *outfile*
    (a no-op when pretty printing is disabled)."""
    if pretty_print:
        outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape XML markup characters in *inStr*, leaving any CDATA
    sections untouched."""
    if not inStr:
        return ''
    raw = inStr if isinstance(inStr, BaseStrType_) else '%s' % inStr
    pieces = []
    cursor = 0
    # Escape the text between CDATA sections; copy the sections verbatim.
    for match in CDATA_pattern_.finditer(raw):
        pieces.append(quote_xml_aux(raw[cursor:match.start()]))
        pieces.append(raw[match.start():match.end()])
        cursor = match.end()
    pieces.append(quote_xml_aux(raw[cursor:]))
    return ''.join(pieces)
def quote_xml_aux(inStr):
    """Escape the XML special characters ``&``, ``<`` and ``>`` in *inStr*.

    Restored: the entity replacement strings had been HTML-unescaped in
    transit (e.g. ``replace('&', '&')``), turning every replacement into
    a no-op.
    """
    # '&' must be escaped first so the entities added below are not
    # themselves re-escaped.
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
s1 = s1.replace('&', '&')
s1 = s1.replace('<', '<')
s1 = s1.replace('>', '>')
if '"' in s1:
if "'" in s1:
s1 = '"%s"' % s1.replace('"', """)
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
def quote_python(inStr):
    """Render *inStr* as a quoted Python string literal, picking single,
    double or triple quotes to suit its contents."""
    text = inStr
    if "'" not in text:
        # Safe to single-quote; use triple quotes for multi-line text.
        if '\n' not in text:
            return "'%s'" % text
        return "'''%s'''" % text
    if '"' in text:
        text = text.replace('"', '\\"')
    if '\n' not in text:
        return '"%s"' % text
    return '"""%s"""' % text
def get_all_text_(node):
    """Collect *node*'s direct text content: its own ``.text`` plus the
    ``.tail`` text that follows each child element."""
    pieces = [node.text if node.text is not None else '']
    pieces.extend(child.tail for child in node if child.tail is not None)
    return ''.join(pieces)
def find_attr_value_(attr_name, node):
    """Look up an attribute value on *node*.

    A plain name is fetched directly; a ``prefix:name`` is resolved to
    ``{uri}name`` via ``node.nsmap`` (an lxml-specific attribute —
    TODO confirm callers always pass lxml nodes on that path).
    Returns None when the attribute is absent or unresolvable.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local = parts
        uri = node.nsmap.get(prefix)
        if uri is not None:
            return attrs.get('{%s}%s' % (uri, local))
    return None
def encode_str_2_3(instr):
    # Identity pass-through; kept as a seam for Python 2/3 string handling
    # (presumably the Python 2 code path differed — not visible here).
    return instr
class GDSParseError(Exception):
    """Raised (via raise_parse_error) when XML content fails the
    parse/validate helpers in this module."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for *msg*; when *node* is given, annotate the
    message with the element tag and source line (lxml's ``sourceline`` —
    TODO confirm non-lxml nodes never reach this path)."""
    if node is None:
        raise GDSParseError(msg)
    detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline)
    raise GDSParseError(detail)
class MixedContainer:
    """Holds one piece of mixed XML content (text, a simple value, or a
    nested complex element) together with category/type tags that drive
    the export methods below."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    # Write this item as XML text to *outfile*, dispatching on category.
    def export(self, outfile, level, name, namespace,
               pretty_print=True):
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(
                outfile, level, namespace, name_=name,
                pretty_print=pretty_print)
    # Write a simple value as <name>value</name>, formatted per content_type.
    def exportSimple(self, outfile, level, name):
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    # Append this item to an lxml/etree *element* (text goes into
    # element.text or the previous child's tail, as XML requires).
    def to_etree(self, element):
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    # Render a simple value as text, formatted per content_type.
    def to_etree_simple(self):
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    # Write this item as Python constructor source code (literal export).
    def exportLiteral(self, outfile, level, name):
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata record describing one member (element or attribute) of a
    generated binding class: its name, XML data type (or chain of types),
    container flag, child attributes, choice group and optionality.
    """
    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional
    def set_name(self, name):
        self.name = name
    def get_name(self):
        return self.name
    def set_data_type(self, data_type):
        self.data_type = data_type
    def get_data_type_chain(self):
        # The raw data_type, which may be a list (restriction chain).
        return self.data_type
    def get_data_type(self):
        """Return the effective data type: the last entry of a type
        chain ('xs:string' for an empty chain), or the scalar type."""
        if not isinstance(self.data_type, list):
            return self.data_type
        return self.data_type[-1] if self.data_type else 'xs:string'
    def set_container(self, container):
        self.container = container
    def get_container(self):
        return self.container
    def set_child_attrs(self, child_attrs):
        self.child_attrs = child_attrs
    def get_child_attrs(self):
        return self.child_attrs
    def set_choice(self, choice):
        self.choice = choice
    def get_choice(self):
        return self.choice
    def set_optional(self, optional):
        self.optional = optional
    def get_optional(self):
        return self.optional
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class CarrierCodeType(Enum):
    """Identification of a FedEx operating company (transportation)."""
    FDXC = 'FDXC'
    FDXE = 'FDXE'
    FDXG = 'FDXG'
    FXCC = 'FXCC'
    FXFR = 'FXFR'
    FXSP = 'FXSP'
class ConsolidationType(Enum):
    """Allowed values for the ConsolidationType simple type."""
    INTERNATIONAL_DISTRIBUTION_FREIGHT = 'INTERNATIONAL_DISTRIBUTION_FREIGHT'
    INTERNATIONAL_ECONOMY_DISTRIBUTION = 'INTERNATIONAL_ECONOMY_DISTRIBUTION'
    INTERNATIONAL_GROUND_DISTRIBUTION = 'INTERNATIONAL_GROUND_DISTRIBUTION'
    INTERNATIONAL_PRIORITY_DISTRIBUTION = 'INTERNATIONAL_PRIORITY_DISTRIBUTION'
    TRANSBORDER_DISTRIBUTION = 'TRANSBORDER_DISTRIBUTION'
class CountryRelationshipType(Enum):
    """Describes relationship between origin and destination countries."""
    DOMESTIC = 'DOMESTIC'
    INTERNATIONAL = 'INTERNATIONAL'
class DayOfWeekType(Enum):
    """Three-letter day-of-week codes."""
    FRI = 'FRI'
    MON = 'MON'
    SAT = 'SAT'
    SUN = 'SUN'
    THU = 'THU'
    TUE = 'TUE'
    WED = 'WED'
class DistanceUnits(Enum):
    """Units for expressing a distance (kilometers or miles)."""
    KM = 'KM'
    MI = 'MI'
class DistributionClearanceType(Enum):
    """Allowed values for the DistributionClearanceType simple type."""
    DESTINATION_COUNTRY_CLEARANCE = 'DESTINATION_COUNTRY_CLEARANCE'
    SINGLE_POINT_OF_CLEARANCE = 'SINGLE_POINT_OF_CLEARANCE'
class EnterprisePermissionType(Enum):
    """Allowed values for the EnterprisePermissionType simple type."""
    ALLOWED = 'ALLOWED'
    ALLOWED_BY_EXCEPTION = 'ALLOWED_BY_EXCEPTION'
    DISALLOWED = 'DISALLOWED'
class ExpressRegionCode(Enum):
    """Indicates a FedEx Express operating region."""
    APAC = 'APAC'
    CA = 'CA'
    EMEA = 'EMEA'
    LAC = 'LAC'
    US = 'US'
class FedExLocationType(Enum):
    """Identifies a kind of FedEx facility."""
    FEDEX_AUTHORIZED_SHIP_CENTER = 'FEDEX_AUTHORIZED_SHIP_CENTER'
    FEDEX_EXPRESS_STATION = 'FEDEX_EXPRESS_STATION'
    FEDEX_FACILITY = 'FEDEX_FACILITY'
    FEDEX_FREIGHT_SERVICE_CENTER = 'FEDEX_FREIGHT_SERVICE_CENTER'
    FEDEX_GROUND_TERMINAL = 'FEDEX_GROUND_TERMINAL'
    FEDEX_HOME_DELIVERY_STATION = 'FEDEX_HOME_DELIVERY_STATION'
    FEDEX_OFFICE = 'FEDEX_OFFICE'
    FEDEX_ONSITE = 'FEDEX_ONSITE'
    FEDEX_SELF_SERVICE_LOCATION = 'FEDEX_SELF_SERVICE_LOCATION'
    FEDEX_SHIPSITE = 'FEDEX_SHIPSITE'
    FEDEX_SHIP_AND_GET = 'FEDEX_SHIP_AND_GET'
    FEDEX_SMART_POST_HUB = 'FEDEX_SMART_POST_HUB'
class LatestDropOffOverlayType(Enum):
    """Specifies the reason for the overlay of the daily last drop off
    time for a carrier."""
    US_WEST_COAST = 'US_WEST_COAST'
class LinearUnits(Enum):
    """Units for expressing a linear dimension (centimeters or inches)."""
    CM = 'CM'
    IN = 'IN'
class LocationAccessibilityType(Enum):
    """Indicates how this can be accessed."""
    INSIDE = 'INSIDE'
    OUTSIDE = 'OUTSIDE'
class LocationAttributesType(Enum):
    """Attributes/services that may be associated with a FedEx location."""
    ACCEPTS_CASH = 'ACCEPTS_CASH'
    ALREADY_OPEN = 'ALREADY_OPEN'
    CLEARANCE_SERVICES = 'CLEARANCE_SERVICES'
    COPY_AND_PRINT_SERVICES = 'COPY_AND_PRINT_SERVICES'
    DANGEROUS_GOODS_SERVICES = 'DANGEROUS_GOODS_SERVICES'
    DIRECT_MAIL_SERVICES = 'DIRECT_MAIL_SERVICES'
    DOMESTIC_SHIPPING_SERVICES = 'DOMESTIC_SHIPPING_SERVICES'
    DROP_BOX = 'DROP_BOX'
    INTERNATIONAL_SHIPPING_SERVICES = 'INTERNATIONAL_SHIPPING_SERVICES'
    LOCATION_IS_IN_AIRPORT = 'LOCATION_IS_IN_AIRPORT'
    NOTARY_SERVICES = 'NOTARY_SERVICES'
    OBSERVES_DAY_LIGHT_SAVING_TIMES = 'OBSERVES_DAY_LIGHT_SAVING_TIMES'
    OPEN_TWENTY_FOUR_HOURS = 'OPEN_TWENTY_FOUR_HOURS'
    PACKAGING_SUPPLIES = 'PACKAGING_SUPPLIES'
    PACK_AND_SHIP = 'PACK_AND_SHIP'
    PASSPORT_PHOTO_SERVICES = 'PASSPORT_PHOTO_SERVICES'
    RETURNS_SERVICES = 'RETURNS_SERVICES'
    SIGNS_AND_BANNERS_SERVICE = 'SIGNS_AND_BANNERS_SERVICE'
    SONY_PICTURE_STATION = 'SONY_PICTURE_STATION'
class LocationContentOptionType(Enum):
    """Optional content that can be requested for a location."""
    HOLIDAYS = 'HOLIDAYS'
    LOCATION_DROPOFF_TIMES = 'LOCATION_DROPOFF_TIMES'
    MAP_URL = 'MAP_URL'
    TIMEZONE_OFFSET = 'TIMEZONE_OFFSET'
class LocationSearchFilterType(Enum):
    """Specifies the criteria used to filter the location search results."""
    EXCLUDE_LOCATIONS_OUTSIDE_COUNTRY = 'EXCLUDE_LOCATIONS_OUTSIDE_COUNTRY'
    EXCLUDE_LOCATIONS_OUTSIDE_STATE_OR_PROVINCE = 'EXCLUDE_LOCATIONS_OUTSIDE_STATE_OR_PROVINCE'
    EXCLUDE_UNAVAILABLE_LOCATIONS = 'EXCLUDE_UNAVAILABLE_LOCATIONS'
class LocationSortCriteriaType(Enum):
    """Specifies the criterion to be used to sort the location details."""
    DISTANCE = 'DISTANCE'
    LATEST_EXPRESS_DROPOFF_TIME = 'LATEST_EXPRESS_DROPOFF_TIME'
    LATEST_GROUND_DROPOFF_TIME = 'LATEST_GROUND_DROPOFF_TIME'
    LOCATION_TYPE = 'LOCATION_TYPE'
class LocationSortOrderType(Enum):
    """Specifies sort order of the location details."""
    HIGHEST_TO_LOWEST = 'HIGHEST_TO_LOWEST'
    LOWEST_TO_HIGHEST = 'LOWEST_TO_HIGHEST'
class LocationTransferOfPossessionType(Enum):
    """Allowed values for the LocationTransferOfPossessionType simple type."""
    DROPOFF = 'DROPOFF'
    HOLD_AT_LOCATION = 'HOLD_AT_LOCATION'
    REDIRECT_TO_HOLD_AT_LOCATION = 'REDIRECT_TO_HOLD_AT_LOCATION'
class LocationsSearchCriteriaType(Enum):
    """Specifies the criteria types that may be used to search for
    FedEx locations."""
    ADDRESS = 'ADDRESS'
    GEOGRAPHIC_COORDINATES = 'GEOGRAPHIC_COORDINATES'
    PHONE_NUMBER = 'PHONE_NUMBER'
class MultipleMatchesActionType(Enum):
    """Action to take when a search yields multiple matches."""
    RETURN_ALL = 'RETURN_ALL'
    RETURN_ERROR = 'RETURN_ERROR'
    RETURN_FIRST = 'RETURN_FIRST'
class NotificationSeverityType(Enum):
    """Severity level of a service notification."""
    ERROR = 'ERROR'
    FAILURE = 'FAILURE'
    NOTE = 'NOTE'
    SUCCESS = 'SUCCESS'
    WARNING = 'WARNING'
WARNING='WARNING'
class OperationalHoursType(Enum):
CLOSED_ALL_DAY='CLOSED_ALL_DAY'
OPEN_ALL_DAY='OPEN_ALL_DAY'
OPEN_BY_HOURS='OPEN_BY_HOURS'
class ReservationAttributesType(Enum):
    """Attributes about a reservation at a FedEx location."""
    RESERVATION_AVAILABLE = 'RESERVATION_AVAILABLE'
class ServiceCategoryType(Enum):
    """Allowed values for the ServiceCategoryType simple type."""
    EXPRESS_FREIGHT = 'EXPRESS_FREIGHT'
    EXPRESS_PARCEL = 'EXPRESS_PARCEL'
    GROUND_HOME_DELIVERY = 'GROUND_HOME_DELIVERY'
class ShippingActionType(Enum):
    """Allowed values for the ShippingActionType simple type."""
    DELIVERIES = 'DELIVERIES'
    PICKUPS = 'PICKUPS'
class SupportedRedirectToHoldServiceType(Enum):
    """DEPRECATED as of July 2017."""
    FEDEX_EXPRESS = 'FEDEX_EXPRESS'
    FEDEX_GROUND = 'FEDEX_GROUND'
    FEDEX_GROUND_HOME_DELIVERY = 'FEDEX_GROUND_HOME_DELIVERY'
class WeightUnits(Enum):
    """Units for expressing a weight (kilograms or pounds)."""
    KG = 'KG'
    LB = 'LB'
class Address(GeneratedsSuper):
    """Descriptive data for a physical location. May be used as an actual
    physical address (place to which one could go), or as a container of
    "address parts" which should be handled as a unit (such as a city-
    state-ZIP combination within the US)."""
    __hash__ = GeneratedsSuper.__hash__
    # Hooks allowing an application to substitute its own subclass via
    # factory(); left as None by the generator.
    subclass = None
    superclass = None
    def __init__(self, StreetLines=None, City=None, StateOrProvinceCode=None, PostalCode=None, UrbanizationCode=None, CountryCode=None, CountryName=None, Residential=None, GeographicCoordinates=None, gds_collector_=None, **kwargs_):
        """Initialize the fields of this binding; each member also gets a
        companion *_nsprefix_ slot recording its XML namespace prefix."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Avoid a shared mutable default: build a fresh list when omitted.
        if StreetLines is None:
            self.StreetLines = []
        else:
            self.StreetLines = StreetLines
        self.StreetLines_nsprefix_ = None
        self.City = City
        self.City_nsprefix_ = None
        self.StateOrProvinceCode = StateOrProvinceCode
        self.StateOrProvinceCode_nsprefix_ = None
        self.PostalCode = PostalCode
        self.PostalCode_nsprefix_ = None
        self.UrbanizationCode = UrbanizationCode
        self.UrbanizationCode_nsprefix_ = None
        self.CountryCode = CountryCode
        self.CountryCode_nsprefix_ = None
        self.CountryName = CountryName
        self.CountryName_nsprefix_ = None
        self.Residential = Residential
        self.Residential_nsprefix_ = None
        self.GeographicCoordinates = GeographicCoordinates
        self.GeographicCoordinates_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an Address (or a registered subclass) instance."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Address)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Address.subclass:
            return Address.subclass(*args_, **kwargs_)
        else:
            return Address(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_StreetLines(self):
        return self.StreetLines
    def set_StreetLines(self, StreetLines):
        self.StreetLines = StreetLines
    def add_StreetLines(self, value):
        self.StreetLines.append(value)
    def insert_StreetLines_at(self, index, value):
        self.StreetLines.insert(index, value)
    def replace_StreetLines_at(self, index, value):
        self.StreetLines[index] = value
    def get_City(self):
        return self.City
    def set_City(self, City):
        self.City = City
    def get_StateOrProvinceCode(self):
        return self.StateOrProvinceCode
    def set_StateOrProvinceCode(self, StateOrProvinceCode):
        self.StateOrProvinceCode = StateOrProvinceCode
    def get_PostalCode(self):
        return self.PostalCode
    def set_PostalCode(self, PostalCode):
        self.PostalCode = PostalCode
    def get_UrbanizationCode(self):
        return self.UrbanizationCode
    def set_UrbanizationCode(self, UrbanizationCode):
        self.UrbanizationCode = UrbanizationCode
    def get_CountryCode(self):
        return self.CountryCode
    def set_CountryCode(self, CountryCode):
        self.CountryCode = CountryCode
    def get_CountryName(self):
        return self.CountryName
    def set_CountryName(self, CountryName):
        self.CountryName = CountryName
    def get_Residential(self):
        return self.Residential
    def set_Residential(self, Residential):
        self.Residential = Residential
    def get_GeographicCoordinates(self):
        return self.GeographicCoordinates
    def set_GeographicCoordinates(self, GeographicCoordinates):
        self.GeographicCoordinates = GeographicCoordinates
    def hasContent_(self):
        """Return True when any child element is set; controls whether
        export() writes a self-closing tag."""
        if (
            self.StreetLines or
            self.City is not None or
            self.StateOrProvinceCode is not None or
            self.PostalCode is not None or
            self.UrbanizationCode is not None or
            self.CountryCode is not None or
            self.CountryName is not None or
            self.Residential is not None or
            self.GeographicCoordinates is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Address', pretty_print=True):
        """Serialize this object as XML to *outfile* at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Address')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'Address':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Address')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Address', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Address'):
        # Address has no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Address', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for StreetLines_ in self.StreetLines:
            namespaceprefix_ = self.StreetLines_nsprefix_ + ':' if (UseCapturedNS_ and self.StreetLines_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sStreetLines>%s</%sStreetLines>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(StreetLines_), input_name='StreetLines')), namespaceprefix_ , eol_))
        if self.City is not None:
            namespaceprefix_ = self.City_nsprefix_ + ':' if (UseCapturedNS_ and self.City_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCity>%s</%sCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.City), input_name='City')), namespaceprefix_ , eol_))
        if self.StateOrProvinceCode is not None:
            namespaceprefix_ = self.StateOrProvinceCode_nsprefix_ + ':' if (UseCapturedNS_ and self.StateOrProvinceCode_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sStateOrProvinceCode>%s</%sStateOrProvinceCode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StateOrProvinceCode), input_name='StateOrProvinceCode')), namespaceprefix_ , eol_))
        if self.PostalCode is not None:
            namespaceprefix_ = self.PostalCode_nsprefix_ + ':' if (UseCapturedNS_ and self.PostalCode_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPostalCode>%s</%sPostalCode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PostalCode), input_name='PostalCode')), namespaceprefix_ , eol_))
        if self.UrbanizationCode is not None:
            namespaceprefix_ = self.UrbanizationCode_nsprefix_ + ':' if (UseCapturedNS_ and self.UrbanizationCode_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sUrbanizationCode>%s</%sUrbanizationCode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.UrbanizationCode), input_name='UrbanizationCode')), namespaceprefix_ , eol_))
        if self.CountryCode is not None:
            namespaceprefix_ = self.CountryCode_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryCode_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCountryCode>%s</%sCountryCode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryCode), input_name='CountryCode')), namespaceprefix_ , eol_))
        if self.CountryName is not None:
            namespaceprefix_ = self.CountryName_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryName_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCountryName>%s</%sCountryName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryName), input_name='CountryName')), namespaceprefix_ , eol_))
        if self.Residential is not None:
            namespaceprefix_ = self.Residential_nsprefix_ + ':' if (UseCapturedNS_ and self.Residential_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sResidential>%s</%sResidential>%s' % (namespaceprefix_ , self.gds_format_boolean(self.Residential, input_name='Residential'), namespaceprefix_ , eol_))
        if self.GeographicCoordinates is not None:
            namespaceprefix_ = self.GeographicCoordinates_nsprefix_ + ':' if (UseCapturedNS_ and self.GeographicCoordinates_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sGeographicCoordinates>%s</%sGeographicCoordinates>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GeographicCoordinates), input_name='GeographicCoordinates')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an etree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Address has no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element of *node* into the matching member."""
        if nodeName_ == 'StreetLines':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'StreetLines')
            value_ = self.gds_validate_string(value_, node, 'StreetLines')
            self.StreetLines.append(value_)
            self.StreetLines_nsprefix_ = child_.prefix
        elif nodeName_ == 'City':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'City')
            value_ = self.gds_validate_string(value_, node, 'City')
            self.City = value_
            self.City_nsprefix_ = child_.prefix
        elif nodeName_ == 'StateOrProvinceCode':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'StateOrProvinceCode')
            value_ = self.gds_validate_string(value_, node, 'StateOrProvinceCode')
            self.StateOrProvinceCode = value_
            self.StateOrProvinceCode_nsprefix_ = child_.prefix
        elif nodeName_ == 'PostalCode':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'PostalCode')
            value_ = self.gds_validate_string(value_, node, 'PostalCode')
            self.PostalCode = value_
            self.PostalCode_nsprefix_ = child_.prefix
        elif nodeName_ == 'UrbanizationCode':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'UrbanizationCode')
            value_ = self.gds_validate_string(value_, node, 'UrbanizationCode')
            self.UrbanizationCode = value_
            self.UrbanizationCode_nsprefix_ = child_.prefix
        elif nodeName_ == 'CountryCode':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'CountryCode')
            value_ = self.gds_validate_string(value_, node, 'CountryCode')
            self.CountryCode = value_
            self.CountryCode_nsprefix_ = child_.prefix
        elif nodeName_ == 'CountryName':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'CountryName')
            value_ = self.gds_validate_string(value_, node, 'CountryName')
            self.CountryName = value_
            self.CountryName_nsprefix_ = child_.prefix
        elif nodeName_ == 'Residential':
            # Boolean child: parse the text into a bool rather than a string.
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'Residential')
            ival_ = self.gds_validate_boolean(ival_, node, 'Residential')
            self.Residential = ival_
            self.Residential_nsprefix_ = child_.prefix
        elif nodeName_ == 'GeographicCoordinates':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'GeographicCoordinates')
            value_ = self.gds_validate_string(value_, node, 'GeographicCoordinates')
            self.GeographicCoordinates = value_
            self.GeographicCoordinates_nsprefix_ = child_.prefix
# end class Address
class AddressAncillaryDetail(GeneratedsSuper):
    """Additional information about a physical location, such as suite number,
    cross street, floor number in a building. These details are not
    typically a part of a standard address definition; however, these
    details might help locate the address."""
    __hash__ = GeneratedsSuper.__hash__
    # Hooks allowing an application to substitute its own subclass via
    # factory(); left as None by the generator.
    subclass = None
    superclass = None
    def __init__(self, LocationInCity=None, LocationInProperty=None, Accessibility=None, Building=None, Department=None, RoomFloor=None, Suite=None, Apartment=None, Room=None, CrossStreet=None, AdditionalDescriptions=None, gds_collector_=None, **kwargs_):
        """Initialize the fields of this binding; each member also gets a
        companion *_nsprefix_ slot recording its XML namespace prefix."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.LocationInCity = LocationInCity
        self.LocationInCity_nsprefix_ = None
        self.LocationInProperty = LocationInProperty
        self.LocationInProperty_nsprefix_ = None
        self.Accessibility = Accessibility
        # Validate the enum-restricted value at construction time.
        self.validate_LocationAccessibilityType(self.Accessibility)
        self.Accessibility_nsprefix_ = None
        self.Building = Building
        self.Building_nsprefix_ = None
        self.Department = Department
        self.Department_nsprefix_ = None
        self.RoomFloor = RoomFloor
        self.RoomFloor_nsprefix_ = None
        self.Suite = Suite
        self.Suite_nsprefix_ = None
        self.Apartment = Apartment
        self.Apartment_nsprefix_ = None
        self.Room = Room
        self.Room_nsprefix_ = None
        self.CrossStreet = CrossStreet
        self.CrossStreet_nsprefix_ = None
        # Avoid a shared mutable default: build a fresh list when omitted.
        if AdditionalDescriptions is None:
            self.AdditionalDescriptions = []
        else:
            self.AdditionalDescriptions = AdditionalDescriptions
        self.AdditionalDescriptions_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an AddressAncillaryDetail (or registered subclass) instance."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, AddressAncillaryDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if AddressAncillaryDetail.subclass:
            return AddressAncillaryDetail.subclass(*args_, **kwargs_)
        else:
            return AddressAncillaryDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_LocationInCity(self):
        return self.LocationInCity
    def set_LocationInCity(self, LocationInCity):
        self.LocationInCity = LocationInCity
    def get_LocationInProperty(self):
        return self.LocationInProperty
    def set_LocationInProperty(self, LocationInProperty):
        self.LocationInProperty = LocationInProperty
    def get_Accessibility(self):
        return self.Accessibility
    def set_Accessibility(self, Accessibility):
        self.Accessibility = Accessibility
    def get_Building(self):
        return self.Building
    def set_Building(self, Building):
        self.Building = Building
    def get_Department(self):
        return self.Department
    def set_Department(self, Department):
        self.Department = Department
    def get_RoomFloor(self):
        return self.RoomFloor
    def set_RoomFloor(self, RoomFloor):
        self.RoomFloor = RoomFloor
    def get_Suite(self):
        return self.Suite
    def set_Suite(self, Suite):
        self.Suite = Suite
    def get_Apartment(self):
        return self.Apartment
    def set_Apartment(self, Apartment):
        self.Apartment = Apartment
    def get_Room(self):
        return self.Room
    def set_Room(self, Room):
        self.Room = Room
    def get_CrossStreet(self):
        return self.CrossStreet
    def set_CrossStreet(self, CrossStreet):
        self.CrossStreet = CrossStreet
    def get_AdditionalDescriptions(self):
        return self.AdditionalDescriptions
    def set_AdditionalDescriptions(self, AdditionalDescriptions):
        self.AdditionalDescriptions = AdditionalDescriptions
    def add_AdditionalDescriptions(self, value):
        self.AdditionalDescriptions.append(value)
    def insert_AdditionalDescriptions_at(self, index, value):
        self.AdditionalDescriptions.insert(index, value)
    def replace_AdditionalDescriptions_at(self, index, value):
        self.AdditionalDescriptions[index] = value
    def validate_LocationAccessibilityType(self, value):
        """Check *value* against the LocationAccessibilityType enumeration,
        reporting problems to gds_collector_; returns False on violation."""
        result = True
        # Validate type LocationAccessibilityType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['INSIDE', 'OUTSIDE']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationAccessibilityType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when any child element is set; controls whether
        export() writes a self-closing tag."""
        if (
            self.LocationInCity is not None or
            self.LocationInProperty is not None or
            self.Accessibility is not None or
            self.Building is not None or
            self.Department is not None or
            self.RoomFloor is not None or
            self.Suite is not None or
            self.Apartment is not None or
            self.Room is not None or
            self.CrossStreet is not None or
            self.AdditionalDescriptions
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressAncillaryDetail', pretty_print=True):
        """Serialize this object as XML to *outfile* at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressAncillaryDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'AddressAncillaryDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressAncillaryDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressAncillaryDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressAncillaryDetail'):
        # AddressAncillaryDetail has no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressAncillaryDetail', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.LocationInCity is not None:
            namespaceprefix_ = self.LocationInCity_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationInCity_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocationInCity>%s</%sLocationInCity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LocationInCity), input_name='LocationInCity')), namespaceprefix_ , eol_))
        if self.LocationInProperty is not None:
            namespaceprefix_ = self.LocationInProperty_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationInProperty_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocationInProperty>%s</%sLocationInProperty>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LocationInProperty), input_name='LocationInProperty')), namespaceprefix_ , eol_))
        if self.Accessibility is not None:
            namespaceprefix_ = self.Accessibility_nsprefix_ + ':' if (UseCapturedNS_ and self.Accessibility_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAccessibility>%s</%sAccessibility>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Accessibility), input_name='Accessibility')), namespaceprefix_ , eol_))
        if self.Building is not None:
            namespaceprefix_ = self.Building_nsprefix_ + ':' if (UseCapturedNS_ and self.Building_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBuilding>%s</%sBuilding>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Building), input_name='Building')), namespaceprefix_ , eol_))
        if self.Department is not None:
            namespaceprefix_ = self.Department_nsprefix_ + ':' if (UseCapturedNS_ and self.Department_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDepartment>%s</%sDepartment>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Department), input_name='Department')), namespaceprefix_ , eol_))
        if self.RoomFloor is not None:
            namespaceprefix_ = self.RoomFloor_nsprefix_ + ':' if (UseCapturedNS_ and self.RoomFloor_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sRoomFloor>%s</%sRoomFloor>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RoomFloor), input_name='RoomFloor')), namespaceprefix_ , eol_))
        if self.Suite is not None:
            namespaceprefix_ = self.Suite_nsprefix_ + ':' if (UseCapturedNS_ and self.Suite_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSuite>%s</%sSuite>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Suite), input_name='Suite')), namespaceprefix_ , eol_))
        if self.Apartment is not None:
            namespaceprefix_ = self.Apartment_nsprefix_ + ':' if (UseCapturedNS_ and self.Apartment_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sApartment>%s</%sApartment>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Apartment), input_name='Apartment')), namespaceprefix_ , eol_))
        if self.Room is not None:
            namespaceprefix_ = self.Room_nsprefix_ + ':' if (UseCapturedNS_ and self.Room_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sRoom>%s</%sRoom>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Room), input_name='Room')), namespaceprefix_ , eol_))
        if self.CrossStreet is not None:
            namespaceprefix_ = self.CrossStreet_nsprefix_ + ':' if (UseCapturedNS_ and self.CrossStreet_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCrossStreet>%s</%sCrossStreet>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CrossStreet), input_name='CrossStreet')), namespaceprefix_ , eol_))
        for AdditionalDescriptions_ in self.AdditionalDescriptions:
            namespaceprefix_ = self.AdditionalDescriptions_nsprefix_ + ':' if (UseCapturedNS_ and self.AdditionalDescriptions_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAdditionalDescriptions>%s</%sAdditionalDescriptions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(AdditionalDescriptions_), input_name='AdditionalDescriptions')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an etree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # AddressAncillaryDetail has no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element of *node* into the matching member."""
        if nodeName_ == 'LocationInCity':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocationInCity')
            value_ = self.gds_validate_string(value_, node, 'LocationInCity')
            self.LocationInCity = value_
            self.LocationInCity_nsprefix_ = child_.prefix
        elif nodeName_ == 'LocationInProperty':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocationInProperty')
            value_ = self.gds_validate_string(value_, node, 'LocationInProperty')
            self.LocationInProperty = value_
            self.LocationInProperty_nsprefix_ = child_.prefix
        elif nodeName_ == 'Accessibility':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Accessibility')
            value_ = self.gds_validate_string(value_, node, 'Accessibility')
            self.Accessibility = value_
            self.Accessibility_nsprefix_ = child_.prefix
            # validate type LocationAccessibilityType
            self.validate_LocationAccessibilityType(self.Accessibility)
        elif nodeName_ == 'Building':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Building')
            value_ = self.gds_validate_string(value_, node, 'Building')
            self.Building = value_
            self.Building_nsprefix_ = child_.prefix
        elif nodeName_ == 'Department':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Department')
            value_ = self.gds_validate_string(value_, node, 'Department')
            self.Department = value_
            self.Department_nsprefix_ = child_.prefix
        elif nodeName_ == 'RoomFloor':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'RoomFloor')
            value_ = self.gds_validate_string(value_, node, 'RoomFloor')
            self.RoomFloor = value_
            self.RoomFloor_nsprefix_ = child_.prefix
        elif nodeName_ == 'Suite':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Suite')
            value_ = self.gds_validate_string(value_, node, 'Suite')
            self.Suite = value_
            self.Suite_nsprefix_ = child_.prefix
        elif nodeName_ == 'Apartment':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Apartment')
            value_ = self.gds_validate_string(value_, node, 'Apartment')
            self.Apartment = value_
            self.Apartment_nsprefix_ = child_.prefix
        elif nodeName_ == 'Room':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Room')
            value_ = self.gds_validate_string(value_, node, 'Room')
            self.Room = value_
            self.Room_nsprefix_ = child_.prefix
        elif nodeName_ == 'CrossStreet':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'CrossStreet')
            value_ = self.gds_validate_string(value_, node, 'CrossStreet')
            self.CrossStreet = value_
            self.CrossStreet_nsprefix_ = child_.prefix
        elif nodeName_ == 'AdditionalDescriptions':
            # Repeatable element: accumulate into the list member.
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AdditionalDescriptions')
            value_ = self.gds_validate_string(value_, node, 'AdditionalDescriptions')
            self.AdditionalDescriptions.append(value_)
            self.AdditionalDescriptions_nsprefix_ = child_.prefix
# end class AddressAncillaryDetail
class AddressToLocationRelationshipDetail(GeneratedsSuper):
    """Specifies the relationship between the address specified and the
    address of the FedEx Location in terms of distance.

    Generated XML binding class (generateDS-style): each schema element
    maps to an instance attribute with get/set accessors, plus
    ``export*`` methods to serialize to XML and ``build*`` methods to
    populate the object from a parsed element tree node.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Runtime hooks that allow an application to substitute its own
    # subclass when instances are created via factory().
    subclass = None
    superclass = None
    def __init__(self, MatchedAddress=None, MatchedAddressGeographicCoordinates=None, DistanceAndLocationDetails=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_  # collects validation messages
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.MatchedAddress = MatchedAddress
        self.MatchedAddress_nsprefix_ = None
        self.MatchedAddressGeographicCoordinates = MatchedAddressGeographicCoordinates
        self.MatchedAddressGeographicCoordinates_nsprefix_ = None
        # Repeating element: create a fresh list when None is passed so
        # instances never share a mutable default.
        if DistanceAndLocationDetails is None:
            self.DistanceAndLocationDetails = []
        else:
            self.DistanceAndLocationDetails = DistanceAndLocationDetails
        self.DistanceAndLocationDetails_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, AddressToLocationRelationshipDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if AddressToLocationRelationshipDetail.subclass:
            return AddressToLocationRelationshipDetail.subclass(*args_, **kwargs_)
        else:
            return AddressToLocationRelationshipDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # --- generated accessors (one get/set pair per schema element; list
    # --- elements additionally get add/insert/replace helpers) ---
    def get_MatchedAddress(self):
        return self.MatchedAddress
    def set_MatchedAddress(self, MatchedAddress):
        self.MatchedAddress = MatchedAddress
    def get_MatchedAddressGeographicCoordinates(self):
        return self.MatchedAddressGeographicCoordinates
    def set_MatchedAddressGeographicCoordinates(self, MatchedAddressGeographicCoordinates):
        self.MatchedAddressGeographicCoordinates = MatchedAddressGeographicCoordinates
    def get_DistanceAndLocationDetails(self):
        return self.DistanceAndLocationDetails
    def set_DistanceAndLocationDetails(self, DistanceAndLocationDetails):
        self.DistanceAndLocationDetails = DistanceAndLocationDetails
    def add_DistanceAndLocationDetails(self, value):
        self.DistanceAndLocationDetails.append(value)
    def insert_DistanceAndLocationDetails_at(self, index, value):
        self.DistanceAndLocationDetails.insert(index, value)
    def replace_DistanceAndLocationDetails_at(self, index, value):
        self.DistanceAndLocationDetails[index] = value
    def hasContent_(self):
        """Return True if any child element is set (non-empty list counts)."""
        if (
            self.MatchedAddress is not None or
            self.MatchedAddressGeographicCoordinates is not None or
            self.DistanceAndLocationDetails
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressToLocationRelationshipDetail', pretty_print=True):
        """Write this object as an XML element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AddressToLocationRelationshipDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None and name_ == 'AddressToLocationRelationshipDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AddressToLocationRelationshipDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AddressToLocationRelationshipDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AddressToLocationRelationshipDetail'):
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AddressToLocationRelationshipDetail', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.MatchedAddress is not None:
            namespaceprefix_ = self.MatchedAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.MatchedAddress_nsprefix_) else ''
            self.MatchedAddress.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MatchedAddress', pretty_print=pretty_print)
        if self.MatchedAddressGeographicCoordinates is not None:
            namespaceprefix_ = self.MatchedAddressGeographicCoordinates_nsprefix_ + ':' if (UseCapturedNS_ and self.MatchedAddressGeographicCoordinates_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMatchedAddressGeographicCoordinates>%s</%sMatchedAddressGeographicCoordinates>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MatchedAddressGeographicCoordinates), input_name='MatchedAddressGeographicCoordinates')), namespaceprefix_ , eol_))
        for DistanceAndLocationDetails_ in self.DistanceAndLocationDetails:
            namespaceprefix_ = self.DistanceAndLocationDetails_nsprefix_ + ':' if (UseCapturedNS_ and self.DistanceAndLocationDetails_nsprefix_) else ''
            DistanceAndLocationDetails_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DistanceAndLocationDetails', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from a parsed element tree *node*."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace from the tag to get the local element name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch a single child element to the matching attribute."""
        if nodeName_ == 'MatchedAddress':
            obj_ = Address.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.MatchedAddress = obj_
            obj_.original_tagname_ = 'MatchedAddress'
        elif nodeName_ == 'MatchedAddressGeographicCoordinates':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'MatchedAddressGeographicCoordinates')
            value_ = self.gds_validate_string(value_, node, 'MatchedAddressGeographicCoordinates')
            self.MatchedAddressGeographicCoordinates = value_
            self.MatchedAddressGeographicCoordinates_nsprefix_ = child_.prefix
        elif nodeName_ == 'DistanceAndLocationDetails':
            obj_ = DistanceAndLocationDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.DistanceAndLocationDetails.append(obj_)
            obj_.original_tagname_ = 'DistanceAndLocationDetails'
# end class AddressToLocationRelationshipDetail
class CarrierDetail(GeneratedsSuper):
    """Describes a carrier at a location: carrier code, service category
    and type, country relationship, drop-off deadline details, and
    shipping holidays.

    Generated XML binding class (generateDS-style) with get/set
    accessors, enumeration validators, and export/build (de)serialization
    methods.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Runtime hooks that allow an application to substitute its own
    # subclass when instances are created via factory().
    subclass = None
    superclass = None
    def __init__(self, Carrier=None, ServiceCategory=None, ServiceType=None, CountryRelationship=None, NormalLatestDropOffDetails=None, ExceptionalLatestDropOffDetails=None, EffectiveLatestDropOffDetails=None, ShippingHolidays=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_  # collects validation messages
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Carrier = Carrier
        self.validate_CarrierCodeType(self.Carrier)
        self.Carrier_nsprefix_ = None
        self.ServiceCategory = ServiceCategory
        self.validate_ServiceCategoryType(self.ServiceCategory)
        self.ServiceCategory_nsprefix_ = None
        self.ServiceType = ServiceType
        self.ServiceType_nsprefix_ = None
        self.CountryRelationship = CountryRelationship
        self.validate_CountryRelationshipType(self.CountryRelationship)
        self.CountryRelationship_nsprefix_ = None
        # Repeating elements: create fresh lists when None is passed so
        # instances never share a mutable default.
        if NormalLatestDropOffDetails is None:
            self.NormalLatestDropOffDetails = []
        else:
            self.NormalLatestDropOffDetails = NormalLatestDropOffDetails
        self.NormalLatestDropOffDetails_nsprefix_ = None
        if ExceptionalLatestDropOffDetails is None:
            self.ExceptionalLatestDropOffDetails = []
        else:
            self.ExceptionalLatestDropOffDetails = ExceptionalLatestDropOffDetails
        self.ExceptionalLatestDropOffDetails_nsprefix_ = None
        self.EffectiveLatestDropOffDetails = EffectiveLatestDropOffDetails
        self.EffectiveLatestDropOffDetails_nsprefix_ = None
        if ShippingHolidays is None:
            self.ShippingHolidays = []
        else:
            self.ShippingHolidays = ShippingHolidays
        self.ShippingHolidays_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CarrierDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CarrierDetail.subclass:
            return CarrierDetail.subclass(*args_, **kwargs_)
        else:
            return CarrierDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # --- generated accessors (one get/set pair per schema element; list
    # --- elements additionally get add/insert/replace helpers) ---
    def get_Carrier(self):
        return self.Carrier
    def set_Carrier(self, Carrier):
        self.Carrier = Carrier
    def get_ServiceCategory(self):
        return self.ServiceCategory
    def set_ServiceCategory(self, ServiceCategory):
        self.ServiceCategory = ServiceCategory
    def get_ServiceType(self):
        return self.ServiceType
    def set_ServiceType(self, ServiceType):
        self.ServiceType = ServiceType
    def get_CountryRelationship(self):
        return self.CountryRelationship
    def set_CountryRelationship(self, CountryRelationship):
        self.CountryRelationship = CountryRelationship
    def get_NormalLatestDropOffDetails(self):
        return self.NormalLatestDropOffDetails
    def set_NormalLatestDropOffDetails(self, NormalLatestDropOffDetails):
        self.NormalLatestDropOffDetails = NormalLatestDropOffDetails
    def add_NormalLatestDropOffDetails(self, value):
        self.NormalLatestDropOffDetails.append(value)
    def insert_NormalLatestDropOffDetails_at(self, index, value):
        self.NormalLatestDropOffDetails.insert(index, value)
    def replace_NormalLatestDropOffDetails_at(self, index, value):
        self.NormalLatestDropOffDetails[index] = value
    def get_ExceptionalLatestDropOffDetails(self):
        return self.ExceptionalLatestDropOffDetails
    def set_ExceptionalLatestDropOffDetails(self, ExceptionalLatestDropOffDetails):
        self.ExceptionalLatestDropOffDetails = ExceptionalLatestDropOffDetails
    def add_ExceptionalLatestDropOffDetails(self, value):
        self.ExceptionalLatestDropOffDetails.append(value)
    def insert_ExceptionalLatestDropOffDetails_at(self, index, value):
        self.ExceptionalLatestDropOffDetails.insert(index, value)
    def replace_ExceptionalLatestDropOffDetails_at(self, index, value):
        self.ExceptionalLatestDropOffDetails[index] = value
    def get_EffectiveLatestDropOffDetails(self):
        return self.EffectiveLatestDropOffDetails
    def set_EffectiveLatestDropOffDetails(self, EffectiveLatestDropOffDetails):
        self.EffectiveLatestDropOffDetails = EffectiveLatestDropOffDetails
    def get_ShippingHolidays(self):
        return self.ShippingHolidays
    def set_ShippingHolidays(self, ShippingHolidays):
        self.ShippingHolidays = ShippingHolidays
    def add_ShippingHolidays(self, value):
        self.ShippingHolidays.append(value)
    def insert_ShippingHolidays_at(self, index, value):
        self.ShippingHolidays.insert(index, value)
    def replace_ShippingHolidays_at(self, index, value):
        self.ShippingHolidays[index] = value
    def validate_CarrierCodeType(self, value):
        """Validate *value* against the CarrierCodeType enumeration.

        Only runs when a collector is attached and simple-type validation
        is enabled; problems are reported via the collector, not raised.
        """
        result = True
        # Validate type CarrierCodeType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value  # no-op retained from the code generator
            enumerations = ['FDXC', 'FDXE', 'FDXG', 'FXCC', 'FXFR', 'FXSP']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on CarrierCodeType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_ServiceCategoryType(self, value):
        """Validate *value* against the ServiceCategoryType enumeration."""
        result = True
        # Validate type ServiceCategoryType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value  # no-op retained from the code generator
            enumerations = ['EXPRESS_FREIGHT', 'EXPRESS_PARCEL', 'GROUND_HOME_DELIVERY']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ServiceCategoryType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_CountryRelationshipType(self, value):
        """Validate *value* against the CountryRelationshipType enumeration."""
        result = True
        # Validate type CountryRelationshipType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value  # no-op retained from the code generator
            enumerations = ['DOMESTIC', 'INTERNATIONAL']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on CountryRelationshipType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True if any child element is set (non-empty list counts)."""
        if (
            self.Carrier is not None or
            self.ServiceCategory is not None or
            self.ServiceType is not None or
            self.CountryRelationship is not None or
            self.NormalLatestDropOffDetails or
            self.ExceptionalLatestDropOffDetails or
            self.EffectiveLatestDropOffDetails is not None or
            self.ShippingHolidays
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierDetail', pretty_print=True):
        """Write this object as an XML element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CarrierDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None and name_ == 'CarrierDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CarrierDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CarrierDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CarrierDetail'):
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CarrierDetail', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Carrier is not None:
            namespaceprefix_ = self.Carrier_nsprefix_ + ':' if (UseCapturedNS_ and self.Carrier_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCarrier>%s</%sCarrier>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Carrier), input_name='Carrier')), namespaceprefix_ , eol_))
        if self.ServiceCategory is not None:
            namespaceprefix_ = self.ServiceCategory_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceCategory_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sServiceCategory>%s</%sServiceCategory>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceCategory), input_name='ServiceCategory')), namespaceprefix_ , eol_))
        if self.ServiceType is not None:
            namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sServiceType>%s</%sServiceType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_))
        if self.CountryRelationship is not None:
            namespaceprefix_ = self.CountryRelationship_nsprefix_ + ':' if (UseCapturedNS_ and self.CountryRelationship_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCountryRelationship>%s</%sCountryRelationship>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CountryRelationship), input_name='CountryRelationship')), namespaceprefix_ , eol_))
        for NormalLatestDropOffDetails_ in self.NormalLatestDropOffDetails:
            namespaceprefix_ = self.NormalLatestDropOffDetails_nsprefix_ + ':' if (UseCapturedNS_ and self.NormalLatestDropOffDetails_nsprefix_) else ''
            NormalLatestDropOffDetails_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NormalLatestDropOffDetails', pretty_print=pretty_print)
        for ExceptionalLatestDropOffDetails_ in self.ExceptionalLatestDropOffDetails:
            namespaceprefix_ = self.ExceptionalLatestDropOffDetails_nsprefix_ + ':' if (UseCapturedNS_ and self.ExceptionalLatestDropOffDetails_nsprefix_) else ''
            ExceptionalLatestDropOffDetails_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExceptionalLatestDropOffDetails', pretty_print=pretty_print)
        if self.EffectiveLatestDropOffDetails is not None:
            namespaceprefix_ = self.EffectiveLatestDropOffDetails_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveLatestDropOffDetails_nsprefix_) else ''
            self.EffectiveLatestDropOffDetails.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EffectiveLatestDropOffDetails', pretty_print=pretty_print)
        for ShippingHolidays_ in self.ShippingHolidays:
            namespaceprefix_ = self.ShippingHolidays_nsprefix_ + ':' if (UseCapturedNS_ and self.ShippingHolidays_nsprefix_) else ''
            ShippingHolidays_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShippingHolidays', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from a parsed element tree *node*."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace from the tag to get the local element name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch a single child element to the matching attribute."""
        if nodeName_ == 'Carrier':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Carrier')
            value_ = self.gds_validate_string(value_, node, 'Carrier')
            self.Carrier = value_
            self.Carrier_nsprefix_ = child_.prefix
            # validate type CarrierCodeType
            self.validate_CarrierCodeType(self.Carrier)
        elif nodeName_ == 'ServiceCategory':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ServiceCategory')
            value_ = self.gds_validate_string(value_, node, 'ServiceCategory')
            self.ServiceCategory = value_
            self.ServiceCategory_nsprefix_ = child_.prefix
            # validate type ServiceCategoryType
            self.validate_ServiceCategoryType(self.ServiceCategory)
        elif nodeName_ == 'ServiceType':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ServiceType')
            value_ = self.gds_validate_string(value_, node, 'ServiceType')
            self.ServiceType = value_
            self.ServiceType_nsprefix_ = child_.prefix
        elif nodeName_ == 'CountryRelationship':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'CountryRelationship')
            value_ = self.gds_validate_string(value_, node, 'CountryRelationship')
            self.CountryRelationship = value_
            self.CountryRelationship_nsprefix_ = child_.prefix
            # validate type CountryRelationshipType
            self.validate_CountryRelationshipType(self.CountryRelationship)
        elif nodeName_ == 'NormalLatestDropOffDetails':
            obj_ = LatestDropOffDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.NormalLatestDropOffDetails.append(obj_)
            obj_.original_tagname_ = 'NormalLatestDropOffDetails'
        elif nodeName_ == 'ExceptionalLatestDropOffDetails':
            obj_ = LatestDropOffDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ExceptionalLatestDropOffDetails.append(obj_)
            obj_.original_tagname_ = 'ExceptionalLatestDropOffDetails'
        elif nodeName_ == 'EffectiveLatestDropOffDetails':
            obj_ = LatestDropOffDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.EffectiveLatestDropOffDetails = obj_
            obj_.original_tagname_ = 'EffectiveLatestDropOffDetails'
        elif nodeName_ == 'ShippingHolidays':
            obj_ = ShippingHoliday.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ShippingHolidays.append(obj_)
            obj_.original_tagname_ = 'ShippingHolidays'
# end class CarrierDetail
class ClearanceCountryDetail(GeneratedsSuper):
    """Specifies the special services supported at the clearance location
    for an individual destination country.

    Generated XML binding class (generateDS-style) with get/set
    accessors and export/build (de)serialization methods.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Runtime hooks that allow an application to substitute its own
    # subclass when instances are created via factory().
    subclass = None
    superclass = None
    def __init__(self, ClearanceCountry=None, ServicesSupported=None, SpecialServicesSupported=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_  # collects validation messages
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.ClearanceCountry = ClearanceCountry
        self.ClearanceCountry_nsprefix_ = None
        # Repeating elements: create fresh lists when None is passed so
        # instances never share a mutable default.
        if ServicesSupported is None:
            self.ServicesSupported = []
        else:
            self.ServicesSupported = ServicesSupported
        self.ServicesSupported_nsprefix_ = None
        if SpecialServicesSupported is None:
            self.SpecialServicesSupported = []
        else:
            self.SpecialServicesSupported = SpecialServicesSupported
        self.SpecialServicesSupported_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ClearanceCountryDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ClearanceCountryDetail.subclass:
            return ClearanceCountryDetail.subclass(*args_, **kwargs_)
        else:
            return ClearanceCountryDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # --- generated accessors (one get/set pair per schema element; list
    # --- elements additionally get add/insert/replace helpers) ---
    def get_ClearanceCountry(self):
        return self.ClearanceCountry
    def set_ClearanceCountry(self, ClearanceCountry):
        self.ClearanceCountry = ClearanceCountry
    def get_ServicesSupported(self):
        return self.ServicesSupported
    def set_ServicesSupported(self, ServicesSupported):
        self.ServicesSupported = ServicesSupported
    def add_ServicesSupported(self, value):
        self.ServicesSupported.append(value)
    def insert_ServicesSupported_at(self, index, value):
        self.ServicesSupported.insert(index, value)
    def replace_ServicesSupported_at(self, index, value):
        self.ServicesSupported[index] = value
    def get_SpecialServicesSupported(self):
        return self.SpecialServicesSupported
    def set_SpecialServicesSupported(self, SpecialServicesSupported):
        self.SpecialServicesSupported = SpecialServicesSupported
    def add_SpecialServicesSupported(self, value):
        self.SpecialServicesSupported.append(value)
    def insert_SpecialServicesSupported_at(self, index, value):
        self.SpecialServicesSupported.insert(index, value)
    def replace_SpecialServicesSupported_at(self, index, value):
        self.SpecialServicesSupported[index] = value
    def hasContent_(self):
        """Return True if any child element is set (non-empty list counts)."""
        if (
            self.ClearanceCountry is not None or
            self.ServicesSupported or
            self.SpecialServicesSupported
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClearanceCountryDetail', pretty_print=True):
        """Write this object as an XML element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ClearanceCountryDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None and name_ == 'ClearanceCountryDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClearanceCountryDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ClearanceCountryDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ClearanceCountryDetail'):
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClearanceCountryDetail', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ClearanceCountry is not None:
            namespaceprefix_ = self.ClearanceCountry_nsprefix_ + ':' if (UseCapturedNS_ and self.ClearanceCountry_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sClearanceCountry>%s</%sClearanceCountry>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClearanceCountry), input_name='ClearanceCountry')), namespaceprefix_ , eol_))
        for ServicesSupported_ in self.ServicesSupported:
            namespaceprefix_ = self.ServicesSupported_nsprefix_ + ':' if (UseCapturedNS_ and self.ServicesSupported_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sServicesSupported>%s</%sServicesSupported>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ServicesSupported_), input_name='ServicesSupported')), namespaceprefix_ , eol_))
        for SpecialServicesSupported_ in self.SpecialServicesSupported:
            namespaceprefix_ = self.SpecialServicesSupported_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialServicesSupported_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSpecialServicesSupported>%s</%sSpecialServicesSupported>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(SpecialServicesSupported_), input_name='SpecialServicesSupported')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from a parsed element tree *node*."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace from the tag to get the local element name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch a single child element to the matching attribute."""
        if nodeName_ == 'ClearanceCountry':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ClearanceCountry')
            value_ = self.gds_validate_string(value_, node, 'ClearanceCountry')
            self.ClearanceCountry = value_
            self.ClearanceCountry_nsprefix_ = child_.prefix
        elif nodeName_ == 'ServicesSupported':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ServicesSupported')
            value_ = self.gds_validate_string(value_, node, 'ServicesSupported')
            self.ServicesSupported.append(value_)
            self.ServicesSupported_nsprefix_ = child_.prefix
        elif nodeName_ == 'SpecialServicesSupported':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'SpecialServicesSupported')
            value_ = self.gds_validate_string(value_, node, 'SpecialServicesSupported')
            self.SpecialServicesSupported.append(value_)
            self.SpecialServicesSupported_nsprefix_ = child_.prefix
# end class ClearanceCountryDetail
class ClearanceLocationDetail(GeneratedsSuper):
"""Specifies the details about the countries supported by this location."""
__hash__ = GeneratedsSuper.__hash__
subclass = None
superclass = None
def __init__(self, ServicesSupported=None, ConsolidationType=None, ClearanceLocationType=None, SpecialServicesSupported=None, ClearanceCountries=None, ClearanceRoutingCode=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ns_prefix_ = None
if ServicesSupported is None:
self.ServicesSupported = []
else:
self.ServicesSupported = ServicesSupported
self.ServicesSupported_nsprefix_ = None
self.ConsolidationType = ConsolidationType
self.validate_ConsolidationType(self.ConsolidationType)
self.ConsolidationType_nsprefix_ = None
self.ClearanceLocationType = ClearanceLocationType
self.validate_DistributionClearanceType(self.ClearanceLocationType)
self.ClearanceLocationType_nsprefix_ = None
if SpecialServicesSupported is None:
self.SpecialServicesSupported = []
else:
self.SpecialServicesSupported = SpecialServicesSupported
self.SpecialServicesSupported_nsprefix_ = None
if ClearanceCountries is None:
self.ClearanceCountries = []
else:
self.ClearanceCountries = ClearanceCountries
self.ClearanceCountries_nsprefix_ = None
self.ClearanceRoutingCode = ClearanceRoutingCode
self.ClearanceRoutingCode_nsprefix_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ClearanceLocationDetail)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ClearanceLocationDetail.subclass:
return ClearanceLocationDetail.subclass(*args_, **kwargs_)
else:
return ClearanceLocationDetail(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ns_prefix_(self):
return self.ns_prefix_
def set_ns_prefix_(self, ns_prefix):
self.ns_prefix_ = ns_prefix
def get_ServicesSupported(self):
return self.ServicesSupported
def set_ServicesSupported(self, ServicesSupported):
self.ServicesSupported = ServicesSupported
def add_ServicesSupported(self, value):
self.ServicesSupported.append(value)
def insert_ServicesSupported_at(self, index, value):
self.ServicesSupported.insert(index, value)
def replace_ServicesSupported_at(self, index, value):
self.ServicesSupported[index] = value
def get_ConsolidationType(self):
return self.ConsolidationType
def set_ConsolidationType(self, ConsolidationType):
self.ConsolidationType = ConsolidationType
def get_ClearanceLocationType(self):
return self.ClearanceLocationType
def set_ClearanceLocationType(self, ClearanceLocationType):
self.ClearanceLocationType = ClearanceLocationType
def get_SpecialServicesSupported(self):
return self.SpecialServicesSupported
def set_SpecialServicesSupported(self, SpecialServicesSupported):
self.SpecialServicesSupported = SpecialServicesSupported
def add_SpecialServicesSupported(self, value):
self.SpecialServicesSupported.append(value)
def insert_SpecialServicesSupported_at(self, index, value):
self.SpecialServicesSupported.insert(index, value)
def replace_SpecialServicesSupported_at(self, index, value):
self.SpecialServicesSupported[index] = value
def get_ClearanceCountries(self):
return self.ClearanceCountries
def set_ClearanceCountries(self, ClearanceCountries):
self.ClearanceCountries = ClearanceCountries
def add_ClearanceCountries(self, value):
self.ClearanceCountries.append(value)
def insert_ClearanceCountries_at(self, index, value):
self.ClearanceCountries.insert(index, value)
def replace_ClearanceCountries_at(self, index, value):
self.ClearanceCountries[index] = value
def get_ClearanceRoutingCode(self):
return self.ClearanceRoutingCode
def set_ClearanceRoutingCode(self, ClearanceRoutingCode):
self.ClearanceRoutingCode = ClearanceRoutingCode
def validate_ConsolidationType(self, value):
result = True
# Validate type ConsolidationType, a restriction on xs:string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['INTERNATIONAL_DISTRIBUTION_FREIGHT', 'INTERNATIONAL_ECONOMY_DISTRIBUTION', 'INTERNATIONAL_GROUND_DISTRIBUTION', 'INTERNATIONAL_PRIORITY_DISTRIBUTION', 'TRANSBORDER_DISTRIBUTION']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ConsolidationType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
return result
def validate_DistributionClearanceType(self, value):
result = True
# Validate type DistributionClearanceType, a restriction on xs:string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['DESTINATION_COUNTRY_CLEARANCE', 'SINGLE_POINT_OF_CLEARANCE']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on DistributionClearanceType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
return result
def hasContent_(self):
if (
self.ServicesSupported or
self.ConsolidationType is not None or
self.ClearanceLocationType is not None or
self.SpecialServicesSupported or
self.ClearanceCountries or
self.ClearanceRoutingCode is not None
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClearanceLocationDetail', pretty_print=True):
        """Write this object as an XML element to *outfile* at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ClearanceLocationDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A tag name captured during parsing overrides the default element name.
        if self.original_tagname_ is not None and name_ == 'ClearanceLocationDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClearanceLocationDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ClearanceLocationDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ClearanceLocationDetail'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClearanceLocationDetail', fromsubclass_=False, pretty_print=True):
        """Write each non-empty child element in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for ServicesSupported_ in self.ServicesSupported:
            namespaceprefix_ = self.ServicesSupported_nsprefix_ + ':' if (UseCapturedNS_ and self.ServicesSupported_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sServicesSupported>%s</%sServicesSupported>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ServicesSupported_), input_name='ServicesSupported')), namespaceprefix_ , eol_))
        if self.ConsolidationType is not None:
            namespaceprefix_ = self.ConsolidationType_nsprefix_ + ':' if (UseCapturedNS_ and self.ConsolidationType_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sConsolidationType>%s</%sConsolidationType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ConsolidationType), input_name='ConsolidationType')), namespaceprefix_ , eol_))
        if self.ClearanceLocationType is not None:
            namespaceprefix_ = self.ClearanceLocationType_nsprefix_ + ':' if (UseCapturedNS_ and self.ClearanceLocationType_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sClearanceLocationType>%s</%sClearanceLocationType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClearanceLocationType), input_name='ClearanceLocationType')), namespaceprefix_ , eol_))
        for SpecialServicesSupported_ in self.SpecialServicesSupported:
            namespaceprefix_ = self.SpecialServicesSupported_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialServicesSupported_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSpecialServicesSupported>%s</%sSpecialServicesSupported>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(SpecialServicesSupported_), input_name='SpecialServicesSupported')), namespaceprefix_ , eol_))
        for ClearanceCountries_ in self.ClearanceCountries:
            # Complex children serialize themselves recursively.
            namespaceprefix_ = self.ClearanceCountries_nsprefix_ + ':' if (UseCapturedNS_ and self.ClearanceCountries_nsprefix_) else ''
            ClearanceCountries_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ClearanceCountries', pretty_print=pretty_print)
        if self.ClearanceRoutingCode is not None:
            namespaceprefix_ = self.ClearanceRoutingCode_nsprefix_ + ':' if (UseCapturedNS_ and self.ClearanceRoutingCode_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sClearanceRoutingCode>%s</%sClearanceRoutingCode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ClearanceRoutingCode), input_name='ClearanceRoutingCode')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Populate one member from a single parsed child element *child_*."""
        if nodeName_ == 'ServicesSupported':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ServicesSupported')
            value_ = self.gds_validate_string(value_, node, 'ServicesSupported')
            self.ServicesSupported.append(value_)
            self.ServicesSupported_nsprefix_ = child_.prefix
        elif nodeName_ == 'ConsolidationType':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ConsolidationType')
            value_ = self.gds_validate_string(value_, node, 'ConsolidationType')
            self.ConsolidationType = value_
            self.ConsolidationType_nsprefix_ = child_.prefix
            # validate type ConsolidationType
            self.validate_ConsolidationType(self.ConsolidationType)
        elif nodeName_ == 'ClearanceLocationType':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ClearanceLocationType')
            value_ = self.gds_validate_string(value_, node, 'ClearanceLocationType')
            self.ClearanceLocationType = value_
            self.ClearanceLocationType_nsprefix_ = child_.prefix
            # validate type DistributionClearanceType
            self.validate_DistributionClearanceType(self.ClearanceLocationType)
        elif nodeName_ == 'SpecialServicesSupported':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'SpecialServicesSupported')
            value_ = self.gds_validate_string(value_, node, 'SpecialServicesSupported')
            self.SpecialServicesSupported.append(value_)
            self.SpecialServicesSupported_nsprefix_ = child_.prefix
        elif nodeName_ == 'ClearanceCountries':
            # Complex child: delegate to the child type's own build().
            obj_ = ClearanceCountryDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ClearanceCountries.append(obj_)
            obj_.original_tagname_ = 'ClearanceCountries'
        elif nodeName_ == 'ClearanceRoutingCode':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ClearanceRoutingCode')
            value_ = self.gds_validate_string(value_, node, 'ClearanceRoutingCode')
            self.ClearanceRoutingCode = value_
            self.ClearanceRoutingCode_nsprefix_ = child_.prefix
# end class ClearanceLocationDetail
class ClientDetail(GeneratedsSuper):
    """Descriptive data for the client submitting a transaction.

    generateDS-style binding class: plain attributes for each schema
    element, get/set accessors, export() for serialization and build()
    for deserialization from an ElementTree node.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, AccountNumber=None, MeterNumber=None, MeterInstance=None, IntegratorId=None, Region=None, Localization=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.AccountNumber = AccountNumber
        self.AccountNumber_nsprefix_ = None
        self.MeterNumber = MeterNumber
        self.MeterNumber_nsprefix_ = None
        self.MeterInstance = MeterInstance
        self.MeterInstance_nsprefix_ = None
        self.IntegratorId = IntegratorId
        self.IntegratorId_nsprefix_ = None
        self.Region = Region
        # Region is checked eagerly at construction time.
        self.validate_ExpressRegionCode(self.Region)
        self.Region_nsprefix_ = None
        self.Localization = Localization
        self.Localization_nsprefix_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ClientDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ClientDetail.subclass:
            return ClientDetail.subclass(*args_, **kwargs_)
        else:
            return ClientDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generateDS-style accessors -----------------------------------------
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_AccountNumber(self):
        return self.AccountNumber
    def set_AccountNumber(self, AccountNumber):
        self.AccountNumber = AccountNumber
    def get_MeterNumber(self):
        return self.MeterNumber
    def set_MeterNumber(self, MeterNumber):
        self.MeterNumber = MeterNumber
    def get_MeterInstance(self):
        return self.MeterInstance
    def set_MeterInstance(self, MeterInstance):
        self.MeterInstance = MeterInstance
    def get_IntegratorId(self):
        return self.IntegratorId
    def set_IntegratorId(self, IntegratorId):
        self.IntegratorId = IntegratorId
    def get_Region(self):
        return self.Region
    def set_Region(self, Region):
        self.Region = Region
    def get_Localization(self):
        return self.Localization
    def set_Localization(self, Localization):
        self.Localization = Localization
    def validate_ExpressRegionCode(self, value):
        """Validate *value* against the ExpressRegionCode xsd enumeration.

        Returns True when the value is acceptable (``None`` counts as
        valid); failures are reported via ``self.gds_collector_``.
        """
        result = True
        # Validate type ExpressRegionCode, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            # (removed dead no-op `value = value` left by the code generator)
            enumerations = ['APAC', 'CA', 'EMEA', 'LAC', 'US']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ExpressRegionCode' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when at least one child element carries a value."""
        if (
            self.AccountNumber is not None or
            self.MeterNumber is not None or
            self.MeterInstance is not None or
            self.IntegratorId is not None or
            self.Region is not None or
            self.Localization is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClientDetail', pretty_print=True):
        """Write this object as an XML element to *outfile* at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ClientDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A tag name captured during parsing overrides the default element name.
        if self.original_tagname_ is not None and name_ == 'ClientDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClientDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ClientDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ClientDetail'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClientDetail', fromsubclass_=False, pretty_print=True):
        """Write each non-empty child element in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.AccountNumber is not None:
            namespaceprefix_ = self.AccountNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.AccountNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAccountNumber>%s</%sAccountNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AccountNumber), input_name='AccountNumber')), namespaceprefix_ , eol_))
        if self.MeterNumber is not None:
            namespaceprefix_ = self.MeterNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.MeterNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMeterNumber>%s</%sMeterNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MeterNumber), input_name='MeterNumber')), namespaceprefix_ , eol_))
        if self.MeterInstance is not None:
            namespaceprefix_ = self.MeterInstance_nsprefix_ + ':' if (UseCapturedNS_ and self.MeterInstance_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMeterInstance>%s</%sMeterInstance>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MeterInstance), input_name='MeterInstance')), namespaceprefix_ , eol_))
        if self.IntegratorId is not None:
            namespaceprefix_ = self.IntegratorId_nsprefix_ + ':' if (UseCapturedNS_ and self.IntegratorId_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sIntegratorId>%s</%sIntegratorId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.IntegratorId), input_name='IntegratorId')), namespaceprefix_ , eol_))
        if self.Region is not None:
            namespaceprefix_ = self.Region_nsprefix_ + ':' if (UseCapturedNS_ and self.Region_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sRegion>%s</%sRegion>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Region), input_name='Region')), namespaceprefix_ , eol_))
        if self.Localization is not None:
            # Complex child: serializes itself recursively.
            namespaceprefix_ = self.Localization_nsprefix_ + ':' if (UseCapturedNS_ and self.Localization_nsprefix_) else ''
            self.Localization.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Localization', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Populate one member from a single parsed child element *child_*."""
        if nodeName_ == 'AccountNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AccountNumber')
            value_ = self.gds_validate_string(value_, node, 'AccountNumber')
            self.AccountNumber = value_
            self.AccountNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'MeterNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'MeterNumber')
            value_ = self.gds_validate_string(value_, node, 'MeterNumber')
            self.MeterNumber = value_
            self.MeterNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'MeterInstance':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'MeterInstance')
            value_ = self.gds_validate_string(value_, node, 'MeterInstance')
            self.MeterInstance = value_
            self.MeterInstance_nsprefix_ = child_.prefix
        elif nodeName_ == 'IntegratorId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'IntegratorId')
            value_ = self.gds_validate_string(value_, node, 'IntegratorId')
            self.IntegratorId = value_
            self.IntegratorId_nsprefix_ = child_.prefix
        elif nodeName_ == 'Region':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Region')
            value_ = self.gds_validate_string(value_, node, 'Region')
            self.Region = value_
            self.Region_nsprefix_ = child_.prefix
            # validate type ExpressRegionCode
            self.validate_ExpressRegionCode(self.Region)
        elif nodeName_ == 'Localization':
            # Complex child: delegate to the child type's own build().
            obj_ = Localization.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Localization = obj_
            obj_.original_tagname_ = 'Localization'
# end class ClientDetail
class Contact(GeneratedsSuper):
    """The descriptive data for a point-of-contact person."""
    # generateDS-style binding class: plain attributes for each schema
    # element, get/set accessors, export() / build() for (de)serialization.
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, ContactId=None, PersonName=None, Title=None, CompanyName=None, PhoneNumber=None, PhoneExtension=None, TollFreePhoneNumber=None, PagerNumber=None, FaxNumber=None, EMailAddress=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Each element value is paired with a *_nsprefix_ slot holding the
        # namespace prefix captured while parsing (None until build()).
        self.ContactId = ContactId
        self.ContactId_nsprefix_ = None
        self.PersonName = PersonName
        self.PersonName_nsprefix_ = None
        self.Title = Title
        self.Title_nsprefix_ = None
        self.CompanyName = CompanyName
        self.CompanyName_nsprefix_ = None
        self.PhoneNumber = PhoneNumber
        self.PhoneNumber_nsprefix_ = None
        self.PhoneExtension = PhoneExtension
        self.PhoneExtension_nsprefix_ = None
        self.TollFreePhoneNumber = TollFreePhoneNumber
        self.TollFreePhoneNumber_nsprefix_ = None
        self.PagerNumber = PagerNumber
        self.PagerNumber_nsprefix_ = None
        self.FaxNumber = FaxNumber
        self.FaxNumber_nsprefix_ = None
        self.EMailAddress = EMailAddress
        self.EMailAddress_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Honors the generateDS subclass-override mechanism before falling
        # back to constructing Contact itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Contact)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Contact.subclass:
            return Contact.subclass(*args_, **kwargs_)
        else:
            return Contact(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generateDS-style accessors -----------------------------------------
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_ContactId(self):
        return self.ContactId
    def set_ContactId(self, ContactId):
        self.ContactId = ContactId
    def get_PersonName(self):
        return self.PersonName
    def set_PersonName(self, PersonName):
        self.PersonName = PersonName
    def get_Title(self):
        return self.Title
    def set_Title(self, Title):
        self.Title = Title
    def get_CompanyName(self):
        return self.CompanyName
    def set_CompanyName(self, CompanyName):
        self.CompanyName = CompanyName
    def get_PhoneNumber(self):
        return self.PhoneNumber
    def set_PhoneNumber(self, PhoneNumber):
        self.PhoneNumber = PhoneNumber
    def get_PhoneExtension(self):
        return self.PhoneExtension
    def set_PhoneExtension(self, PhoneExtension):
        self.PhoneExtension = PhoneExtension
    def get_TollFreePhoneNumber(self):
        return self.TollFreePhoneNumber
    def set_TollFreePhoneNumber(self, TollFreePhoneNumber):
        self.TollFreePhoneNumber = TollFreePhoneNumber
    def get_PagerNumber(self):
        return self.PagerNumber
    def set_PagerNumber(self, PagerNumber):
        self.PagerNumber = PagerNumber
    def get_FaxNumber(self):
        return self.FaxNumber
    def set_FaxNumber(self, FaxNumber):
        self.FaxNumber = FaxNumber
    def get_EMailAddress(self):
        return self.EMailAddress
    def set_EMailAddress(self, EMailAddress):
        self.EMailAddress = EMailAddress
    def hasContent_(self):
        """Return True when at least one child element carries a value."""
        if (
            self.ContactId is not None or
            self.PersonName is not None or
            self.Title is not None or
            self.CompanyName is not None or
            self.PhoneNumber is not None or
            self.PhoneExtension is not None or
            self.TollFreePhoneNumber is not None or
            self.PagerNumber is not None or
            self.FaxNumber is not None or
            self.EMailAddress is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Contact', pretty_print=True):
        """Write this object as an XML element to *outfile* at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Contact')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A tag name captured during parsing overrides the default element name.
        if self.original_tagname_ is not None and name_ == 'Contact':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Contact')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Contact', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Contact'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Contact', fromsubclass_=False, pretty_print=True):
        """Write each non-empty child element in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ContactId is not None:
            namespaceprefix_ = self.ContactId_nsprefix_ + ':' if (UseCapturedNS_ and self.ContactId_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sContactId>%s</%sContactId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ContactId), input_name='ContactId')), namespaceprefix_ , eol_))
        if self.PersonName is not None:
            namespaceprefix_ = self.PersonName_nsprefix_ + ':' if (UseCapturedNS_ and self.PersonName_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPersonName>%s</%sPersonName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PersonName), input_name='PersonName')), namespaceprefix_ , eol_))
        if self.Title is not None:
            namespaceprefix_ = self.Title_nsprefix_ + ':' if (UseCapturedNS_ and self.Title_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTitle>%s</%sTitle>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Title), input_name='Title')), namespaceprefix_ , eol_))
        if self.CompanyName is not None:
            namespaceprefix_ = self.CompanyName_nsprefix_ + ':' if (UseCapturedNS_ and self.CompanyName_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCompanyName>%s</%sCompanyName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CompanyName), input_name='CompanyName')), namespaceprefix_ , eol_))
        if self.PhoneNumber is not None:
            namespaceprefix_ = self.PhoneNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PhoneNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPhoneNumber>%s</%sPhoneNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PhoneNumber), input_name='PhoneNumber')), namespaceprefix_ , eol_))
        if self.PhoneExtension is not None:
            namespaceprefix_ = self.PhoneExtension_nsprefix_ + ':' if (UseCapturedNS_ and self.PhoneExtension_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPhoneExtension>%s</%sPhoneExtension>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PhoneExtension), input_name='PhoneExtension')), namespaceprefix_ , eol_))
        if self.TollFreePhoneNumber is not None:
            namespaceprefix_ = self.TollFreePhoneNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.TollFreePhoneNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTollFreePhoneNumber>%s</%sTollFreePhoneNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TollFreePhoneNumber), input_name='TollFreePhoneNumber')), namespaceprefix_ , eol_))
        if self.PagerNumber is not None:
            namespaceprefix_ = self.PagerNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PagerNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPagerNumber>%s</%sPagerNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PagerNumber), input_name='PagerNumber')), namespaceprefix_ , eol_))
        if self.FaxNumber is not None:
            namespaceprefix_ = self.FaxNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.FaxNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sFaxNumber>%s</%sFaxNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FaxNumber), input_name='FaxNumber')), namespaceprefix_ , eol_))
        if self.EMailAddress is not None:
            namespaceprefix_ = self.EMailAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.EMailAddress_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sEMailAddress>%s</%sEMailAddress>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EMailAddress), input_name='EMailAddress')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Populate one member from a single parsed child element *child_*."""
        if nodeName_ == 'ContactId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ContactId')
            value_ = self.gds_validate_string(value_, node, 'ContactId')
            self.ContactId = value_
            self.ContactId_nsprefix_ = child_.prefix
        elif nodeName_ == 'PersonName':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'PersonName')
            value_ = self.gds_validate_string(value_, node, 'PersonName')
            self.PersonName = value_
            self.PersonName_nsprefix_ = child_.prefix
        elif nodeName_ == 'Title':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Title')
            value_ = self.gds_validate_string(value_, node, 'Title')
            self.Title = value_
            self.Title_nsprefix_ = child_.prefix
        elif nodeName_ == 'CompanyName':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'CompanyName')
            value_ = self.gds_validate_string(value_, node, 'CompanyName')
            self.CompanyName = value_
            self.CompanyName_nsprefix_ = child_.prefix
        elif nodeName_ == 'PhoneNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'PhoneNumber')
            value_ = self.gds_validate_string(value_, node, 'PhoneNumber')
            self.PhoneNumber = value_
            self.PhoneNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'PhoneExtension':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'PhoneExtension')
            value_ = self.gds_validate_string(value_, node, 'PhoneExtension')
            self.PhoneExtension = value_
            self.PhoneExtension_nsprefix_ = child_.prefix
        elif nodeName_ == 'TollFreePhoneNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'TollFreePhoneNumber')
            value_ = self.gds_validate_string(value_, node, 'TollFreePhoneNumber')
            self.TollFreePhoneNumber = value_
            self.TollFreePhoneNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'PagerNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'PagerNumber')
            value_ = self.gds_validate_string(value_, node, 'PagerNumber')
            self.PagerNumber = value_
            self.PagerNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'FaxNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'FaxNumber')
            value_ = self.gds_validate_string(value_, node, 'FaxNumber')
            self.FaxNumber = value_
            self.FaxNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'EMailAddress':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'EMailAddress')
            value_ = self.gds_validate_string(value_, node, 'EMailAddress')
            self.EMailAddress = value_
            self.EMailAddress_nsprefix_ = child_.prefix
# end class Contact
class Dimensions(GeneratedsSuper):
__hash__ = GeneratedsSuper.__hash__
subclass = None
superclass = None
    def __init__(self, Length=None, Width=None, Height=None, Units=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Length/Width/Height are serialized with gds_format_integer (see
        # exportChildren), so integer values are expected here.
        self.Length = Length
        self.Length_nsprefix_ = None
        self.Width = Width
        self.Width_nsprefix_ = None
        self.Height = Height
        self.Height_nsprefix_ = None
        self.Units = Units
        # Units is checked eagerly against the LinearUnits enumeration.
        self.validate_LinearUnits(self.Units)
        self.Units_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Honors the generateDS subclass-override mechanism before falling
        # back to constructing Dimensions itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Dimensions)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Dimensions.subclass:
            return Dimensions.subclass(*args_, **kwargs_)
        else:
            return Dimensions(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generateDS-style accessors -----------------------------------------
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Length(self):
        return self.Length
    def set_Length(self, Length):
        self.Length = Length
    def get_Width(self):
        return self.Width
    def set_Width(self, Width):
        self.Width = Width
    def get_Height(self):
        return self.Height
    def set_Height(self, Height):
        self.Height = Height
    def get_Units(self):
        return self.Units
    def set_Units(self, Units):
        self.Units = Units
def validate_LinearUnits(self, value):
result = True
# Validate type LinearUnits, a restriction on xs:string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['CM', 'IN']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LinearUnits' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
return result
def hasContent_(self):
if (
self.Length is not None or
self.Width is not None or
self.Height is not None or
self.Units is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Dimensions', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('Dimensions')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None and name_ == 'Dimensions':
name_ = self.original_tagname_
if UseCapturedNS_ and self.ns_prefix_:
namespaceprefix_ = self.ns_prefix_ + ':'
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Dimensions')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Dimensions', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Dimensions'):
        # Dimensions defines no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Dimensions', fromsubclass_=False, pretty_print=True):
        """Write the child elements (Length, Width, Height, Units) of
        Dimensions to *outfile*; elements that are None are omitted."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Length is not None:
            namespaceprefix_ = self.Length_nsprefix_ + ':' if (UseCapturedNS_ and self.Length_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLength>%s</%sLength>%s' % (namespaceprefix_ , self.gds_format_integer(self.Length, input_name='Length'), namespaceprefix_ , eol_))
        if self.Width is not None:
            namespaceprefix_ = self.Width_nsprefix_ + ':' if (UseCapturedNS_ and self.Width_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sWidth>%s</%sWidth>%s' % (namespaceprefix_ , self.gds_format_integer(self.Width, input_name='Width'), namespaceprefix_ , eol_))
        if self.Height is not None:
            namespaceprefix_ = self.Height_nsprefix_ + ':' if (UseCapturedNS_ and self.Height_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sHeight>%s</%sHeight>%s' % (namespaceprefix_ , self.gds_format_integer(self.Height, input_name='Height'), namespaceprefix_ , eol_))
        if self.Units is not None:
            namespaceprefix_ = self.Units_nsprefix_ + ':' if (UseCapturedNS_ and self.Units_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sUnits>%s</%sUnits>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Units), input_name='Units')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace URI from the tag to get the local element name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Dimensions defines no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one parsed child element into the matching member field."""
        if nodeName_ == 'Length' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'Length')
            if ival_ < 0:
                # Schema declares Length as xs:nonNegativeInteger.
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'Length')
            self.Length = ival_
            self.Length_nsprefix_ = child_.prefix
        elif nodeName_ == 'Width' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'Width')
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'Width')
            self.Width = ival_
            self.Width_nsprefix_ = child_.prefix
        elif nodeName_ == 'Height' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'Height')
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'Height')
            self.Height = ival_
            self.Height_nsprefix_ = child_.prefix
        elif nodeName_ == 'Units':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Units')
            value_ = self.gds_validate_string(value_, node, 'Units')
            self.Units = value_
            self.Units_nsprefix_ = child_.prefix
            # validate type LinearUnits
            self.validate_LinearUnits(self.Units)
# end class Dimensions
class Distance(GeneratedsSuper):
    """Driving or other transportation distances, distinct from dimension
    measurements."""
    __hash__ = GeneratedsSuper.__hash__
    # Hook points for user-supplied replacement classes (see factory()).
    subclass = None
    superclass = None
    def __init__(self, Value=None, Units=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Value = Value
        self.Value_nsprefix_ = None
        self.Units = Units
        # Validate the DistanceUnits enumeration ('KM'/'MI') eagerly at construction.
        self.validate_DistanceUnits(self.Units)
        self.Units_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else Distance itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Distance)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Distance.subclass:
            return Distance.subclass(*args_, **kwargs_)
        else:
            return Distance(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Value(self):
        return self.Value
    def set_Value(self, Value):
        self.Value = Value
    def get_Units(self):
        return self.Units
    def set_Units(self, Units):
        self.Units = Units
    def validate_DistanceUnits(self, value):
        # Returns True when value is acceptable (or validation is skipped);
        # otherwise records a message on gds_collector_ and returns False.
        result = True
        # Validate type DistanceUnits, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['KM', 'MI']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on DistanceUnits' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        # True when at least one child element is populated.
        if (
            self.Value is not None or
            self.Units is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Distance', pretty_print=True):
        """Serialize this object as an XML element to *outfile* at *level* indentation."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Distance')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'Distance':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Distance')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Distance', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No populated children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Distance'):
        # Distance defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Distance', fromsubclass_=False, pretty_print=True):
        # Write Value (decimal) and Units (string) child elements; None values are omitted.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Value is not None:
            namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sValue>%s</%sValue>%s' % (namespaceprefix_ , self.gds_format_decimal(self.Value, input_name='Value'), namespaceprefix_ , eol_))
        if self.Units is not None:
            namespaceprefix_ = self.Units_nsprefix_ + ':' if (UseCapturedNS_ and self.Units_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sUnits>%s</%sUnits>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Units), input_name='Units')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Distance defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch one parsed child element into the matching member field.
        if nodeName_ == 'Value' and child_.text:
            sval_ = child_.text
            fval_ = self.gds_parse_decimal(sval_, node, 'Value')
            fval_ = self.gds_validate_decimal(fval_, node, 'Value')
            self.Value = fval_
            self.Value_nsprefix_ = child_.prefix
        elif nodeName_ == 'Units':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Units')
            value_ = self.gds_validate_string(value_, node, 'Units')
            self.Units = value_
            self.Units_nsprefix_ = child_.prefix
            # validate type DistanceUnits
            self.validate_DistanceUnits(self.Units)
# end class Distance
class DistanceAndLocationDetail(GeneratedsSuper):
    """Specifies the location details and other information relevant to the
    location that is derived from the inputs provided in the request."""
    __hash__ = GeneratedsSuper.__hash__
    # Hook points for user-supplied replacement classes (see factory()).
    subclass = None
    superclass = None
    def __init__(self, Distance=None, ReservationAvailabilityDetail=None, SupportedRedirectToHoldServices=None, LocationDetail=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Distance = Distance
        self.Distance_nsprefix_ = None
        self.ReservationAvailabilityDetail = ReservationAvailabilityDetail
        self.ReservationAvailabilityDetail_nsprefix_ = None
        # SupportedRedirectToHoldServices is a repeatable element; default to a fresh list.
        if SupportedRedirectToHoldServices is None:
            self.SupportedRedirectToHoldServices = []
        else:
            self.SupportedRedirectToHoldServices = SupportedRedirectToHoldServices
        self.SupportedRedirectToHoldServices_nsprefix_ = None
        self.LocationDetail = LocationDetail
        self.LocationDetail_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DistanceAndLocationDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DistanceAndLocationDetail.subclass:
            return DistanceAndLocationDetail.subclass(*args_, **kwargs_)
        else:
            return DistanceAndLocationDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Distance(self):
        return self.Distance
    def set_Distance(self, Distance):
        self.Distance = Distance
    def get_ReservationAvailabilityDetail(self):
        return self.ReservationAvailabilityDetail
    def set_ReservationAvailabilityDetail(self, ReservationAvailabilityDetail):
        self.ReservationAvailabilityDetail = ReservationAvailabilityDetail
    def get_SupportedRedirectToHoldServices(self):
        return self.SupportedRedirectToHoldServices
    def set_SupportedRedirectToHoldServices(self, SupportedRedirectToHoldServices):
        self.SupportedRedirectToHoldServices = SupportedRedirectToHoldServices
    def add_SupportedRedirectToHoldServices(self, value):
        self.SupportedRedirectToHoldServices.append(value)
    def insert_SupportedRedirectToHoldServices_at(self, index, value):
        self.SupportedRedirectToHoldServices.insert(index, value)
    def replace_SupportedRedirectToHoldServices_at(self, index, value):
        self.SupportedRedirectToHoldServices[index] = value
    def get_LocationDetail(self):
        return self.LocationDetail
    def set_LocationDetail(self, LocationDetail):
        self.LocationDetail = LocationDetail
    def validate_SupportedRedirectToHoldServiceType(self, value):
        # Returns True when value is acceptable (or validation is skipped);
        # otherwise records a message on gds_collector_ and returns False.
        result = True
        # Validate type SupportedRedirectToHoldServiceType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['FEDEX_EXPRESS', 'FEDEX_GROUND', 'FEDEX_GROUND_HOME_DELIVERY']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on SupportedRedirectToHoldServiceType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        # True when at least one child element is populated (non-empty list counts).
        if (
            self.Distance is not None or
            self.ReservationAvailabilityDetail is not None or
            self.SupportedRedirectToHoldServices or
            self.LocationDetail is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DistanceAndLocationDetail', pretty_print=True):
        """Serialize this object as an XML element to *outfile* at *level* indentation."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DistanceAndLocationDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'DistanceAndLocationDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DistanceAndLocationDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DistanceAndLocationDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No populated children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DistanceAndLocationDetail'):
        # DistanceAndLocationDetail defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DistanceAndLocationDetail', fromsubclass_=False, pretty_print=True):
        # Complex children delegate to their own export(); the string list is written inline.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Distance is not None:
            namespaceprefix_ = self.Distance_nsprefix_ + ':' if (UseCapturedNS_ and self.Distance_nsprefix_) else ''
            self.Distance.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Distance', pretty_print=pretty_print)
        if self.ReservationAvailabilityDetail is not None:
            namespaceprefix_ = self.ReservationAvailabilityDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ReservationAvailabilityDetail_nsprefix_) else ''
            self.ReservationAvailabilityDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ReservationAvailabilityDetail', pretty_print=pretty_print)
        for SupportedRedirectToHoldServices_ in self.SupportedRedirectToHoldServices:
            namespaceprefix_ = self.SupportedRedirectToHoldServices_nsprefix_ + ':' if (UseCapturedNS_ and self.SupportedRedirectToHoldServices_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSupportedRedirectToHoldServices>%s</%sSupportedRedirectToHoldServices>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(SupportedRedirectToHoldServices_), input_name='SupportedRedirectToHoldServices')), namespaceprefix_ , eol_))
        if self.LocationDetail is not None:
            namespaceprefix_ = self.LocationDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationDetail_nsprefix_) else ''
            self.LocationDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LocationDetail', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # DistanceAndLocationDetail defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch one parsed child element into the matching member field.
        if nodeName_ == 'Distance':
            obj_ = Distance.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Distance = obj_
            obj_.original_tagname_ = 'Distance'
        elif nodeName_ == 'ReservationAvailabilityDetail':
            obj_ = ReservationAvailabilityDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ReservationAvailabilityDetail = obj_
            obj_.original_tagname_ = 'ReservationAvailabilityDetail'
        elif nodeName_ == 'SupportedRedirectToHoldServices':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'SupportedRedirectToHoldServices')
            value_ = self.gds_validate_string(value_, node, 'SupportedRedirectToHoldServices')
            self.SupportedRedirectToHoldServices.append(value_)
            self.SupportedRedirectToHoldServices_nsprefix_ = child_.prefix
            # validate type SupportedRedirectToHoldServiceType
            self.validate_SupportedRedirectToHoldServiceType(self.SupportedRedirectToHoldServices[-1])
        elif nodeName_ == 'LocationDetail':
            obj_ = LocationDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.LocationDetail = obj_
            obj_.original_tagname_ = 'LocationDetail'
# end class DistanceAndLocationDetail
class Holiday(GeneratedsSuper):
    """A holiday observed by a location, described by a Name and a Date."""
    __hash__ = GeneratedsSuper.__hash__
    # Hook points for user-supplied replacement classes (see factory()).
    subclass = None
    superclass = None
    def __init__(self, Name=None, Date=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Name = Name
        self.Name_nsprefix_ = None
        # Accept Date either as an ISO 'YYYY-MM-DD' string or as a date object.
        if isinstance(Date, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Date, '%Y-%m-%d').date()
        else:
            initvalue_ = Date
        self.Date = initvalue_
        self.Date_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else Holiday itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Holiday)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Holiday.subclass:
            return Holiday.subclass(*args_, **kwargs_)
        else:
            return Holiday(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Name(self):
        return self.Name
    def set_Name(self, Name):
        self.Name = Name
    def get_Date(self):
        return self.Date
    def set_Date(self, Date):
        self.Date = Date
    def hasContent_(self):
        # True when at least one child element is populated.
        if (
            self.Name is not None or
            self.Date is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Holiday', pretty_print=True):
        """Serialize this object as an XML element to *outfile* at *level* indentation."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Holiday')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'Holiday':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Holiday')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Holiday', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No populated children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Holiday'):
        # Holiday defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Holiday', fromsubclass_=False, pretty_print=True):
        # Write Name (string) and Date (xs:date) child elements; None values are omitted.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Name is not None:
            namespaceprefix_ = self.Name_nsprefix_ + ':' if (UseCapturedNS_ and self.Name_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sName>%s</%sName>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')), namespaceprefix_ , eol_))
        if self.Date is not None:
            namespaceprefix_ = self.Date_nsprefix_ + ':' if (UseCapturedNS_ and self.Date_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDate>%s</%sDate>%s' % (namespaceprefix_ , self.gds_format_date(self.Date, input_name='Date'), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Holiday defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch one parsed child element into the matching member field.
        if nodeName_ == 'Name':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Name')
            value_ = self.gds_validate_string(value_, node, 'Name')
            self.Name = value_
            self.Name_nsprefix_ = child_.prefix
        elif nodeName_ == 'Date':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.Date = dval_
            self.Date_nsprefix_ = child_.prefix
# end class Holiday
class LatestDropOffDetail(GeneratedsSuper):
    """Specifies the latest time by which a package can be dropped off at a
    FedEx location."""
    __hash__ = GeneratedsSuper.__hash__
    # Hook points for user-supplied replacement classes (see factory()).
    subclass = None
    superclass = None
    def __init__(self, DayOfWeek=None, Time=None, Overlays=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.DayOfWeek = DayOfWeek
        # Validate the DayOfWeek enumeration eagerly at construction.
        self.validate_DayOfWeekType(self.DayOfWeek)
        self.DayOfWeek_nsprefix_ = None
        # Accept Time either as an 'HH:MM:SS' string or as a time object.
        if isinstance(Time, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Time, '%H:%M:%S').time()
        else:
            initvalue_ = Time
        self.Time = initvalue_
        self.Time_nsprefix_ = None
        # Overlays is a repeatable element; default to a fresh list.
        if Overlays is None:
            self.Overlays = []
        else:
            self.Overlays = Overlays
        self.Overlays_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LatestDropOffDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LatestDropOffDetail.subclass:
            return LatestDropOffDetail.subclass(*args_, **kwargs_)
        else:
            return LatestDropOffDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_DayOfWeek(self):
        return self.DayOfWeek
    def set_DayOfWeek(self, DayOfWeek):
        self.DayOfWeek = DayOfWeek
    def get_Time(self):
        return self.Time
    def set_Time(self, Time):
        self.Time = Time
    def get_Overlays(self):
        return self.Overlays
    def set_Overlays(self, Overlays):
        self.Overlays = Overlays
    def add_Overlays(self, value):
        self.Overlays.append(value)
    def insert_Overlays_at(self, index, value):
        self.Overlays.insert(index, value)
    def replace_Overlays_at(self, index, value):
        self.Overlays[index] = value
    def validate_DayOfWeekType(self, value):
        # Returns True when value is acceptable (or validation is skipped);
        # otherwise records a message on gds_collector_ and returns False.
        result = True
        # Validate type DayOfWeekType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['FRI', 'MON', 'SAT', 'SUN', 'THU', 'TUE', 'WED']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on DayOfWeekType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        # True when at least one child element is populated (non-empty list counts).
        if (
            self.DayOfWeek is not None or
            self.Time is not None or
            self.Overlays
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LatestDropOffDetail', pretty_print=True):
        """Serialize this object as an XML element to *outfile* at *level* indentation."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LatestDropOffDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LatestDropOffDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LatestDropOffDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LatestDropOffDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No populated children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LatestDropOffDetail'):
        # LatestDropOffDetail defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LatestDropOffDetail', fromsubclass_=False, pretty_print=True):
        # Write DayOfWeek, Time, and each Overlays child; complex Overlays delegate to export().
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.DayOfWeek is not None:
            namespaceprefix_ = self.DayOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayOfWeek_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDayOfWeek>%s</%sDayOfWeek>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayOfWeek), input_name='DayOfWeek')), namespaceprefix_ , eol_))
        if self.Time is not None:
            namespaceprefix_ = self.Time_nsprefix_ + ':' if (UseCapturedNS_ and self.Time_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTime>%s</%sTime>%s' % (namespaceprefix_ , self.gds_format_time(self.Time, input_name='Time'), namespaceprefix_ , eol_))
        for Overlays_ in self.Overlays:
            namespaceprefix_ = self.Overlays_nsprefix_ + ':' if (UseCapturedNS_ and self.Overlays_nsprefix_) else ''
            Overlays_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Overlays', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # LatestDropOffDetail defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch one parsed child element into the matching member field.
        if nodeName_ == 'DayOfWeek':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'DayOfWeek')
            value_ = self.gds_validate_string(value_, node, 'DayOfWeek')
            self.DayOfWeek = value_
            self.DayOfWeek_nsprefix_ = child_.prefix
            # validate type DayOfWeekType
            self.validate_DayOfWeekType(self.DayOfWeek)
        elif nodeName_ == 'Time':
            sval_ = child_.text
            dval_ = self.gds_parse_time(sval_)
            self.Time = dval_
            self.Time_nsprefix_ = child_.prefix
        elif nodeName_ == 'Overlays':
            obj_ = LatestDropoffOverlayDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Overlays.append(obj_)
            obj_.original_tagname_ = 'Overlays'
# end class LatestDropOffDetail
class LatestDropoffOverlayDetail(GeneratedsSuper):
"""Specifies the time and reason to overlay the last drop off time for a
carrier at a FedEx location."""
__hash__ = GeneratedsSuper.__hash__
subclass = None
superclass = None
def __init__(self, Type=None, Time=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ns_prefix_ = None
self.Type = Type
self.validate_LatestDropOffOverlayType(self.Type)
self.Type_nsprefix_ = None
if isinstance(Time, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(Time, '%H:%M:%S').time()
else:
initvalue_ = Time
self.Time = initvalue_
self.Time_nsprefix_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, LatestDropoffOverlayDetail)
if subclass is not None:
return subclass(*args_, **kwargs_)
if LatestDropoffOverlayDetail.subclass:
return LatestDropoffOverlayDetail.subclass(*args_, **kwargs_)
else:
return LatestDropoffOverlayDetail(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ns_prefix_(self):
return self.ns_prefix_
def set_ns_prefix_(self, ns_prefix):
self.ns_prefix_ = ns_prefix
def get_Type(self):
return self.Type
def set_Type(self, Type):
self.Type = Type
def get_Time(self):
return self.Time
def set_Time(self, Time):
self.Time = Time
def validate_LatestDropOffOverlayType(self, value):
result = True
# Validate type LatestDropOffOverlayType, a restriction on xs:string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['US_WEST_COAST']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LatestDropOffOverlayType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
return result
def hasContent_(self):
if (
self.Type is not None or
self.Time is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LatestDropoffOverlayDetail', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('LatestDropoffOverlayDetail')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None and name_ == 'LatestDropoffOverlayDetail':
name_ = self.original_tagname_
if UseCapturedNS_ and self.ns_prefix_:
namespaceprefix_ = self.ns_prefix_ + ':'
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LatestDropoffOverlayDetail')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LatestDropoffOverlayDetail', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LatestDropoffOverlayDetail'):
pass
def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LatestDropoffOverlayDetail', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Type is not None:
namespaceprefix_ = self.Type_nsprefix_ + ':' if (UseCapturedNS_ and self.Type_nsprefix_) else ''
showIndent(outfile, level, pretty_print)
outfile.write('<%sType>%s</%sType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Type), input_name='Type')), namespaceprefix_ , eol_))
if self.Time is not None:
namespaceprefix_ = self.Time_nsprefix_ + ':' if (UseCapturedNS_ and self.Time_nsprefix_) else ''
showIndent(outfile, level, pretty_print)
outfile.write('<%sTime>%s</%sTime>%s' % (namespaceprefix_ , self.gds_format_time(self.Time, input_name='Time'), namespaceprefix_ , eol_))
def build(self, node, gds_collector_=None):
self.gds_collector_ = gds_collector_
if SaveElementTreeNode:
self.gds_elementtree_node_ = node
already_processed = set()
self.ns_prefix_ = node.prefix
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # LatestDropoffOverlayDetail defines no XML attributes; nothing to parse.
        pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
if nodeName_ == 'Type':
value_ = child_.text
value_ = self.gds_parse_string(value_, node, 'Type')
value_ = self.gds_validate_string(value_, node, 'Type')
self.Type = value_
self.Type_nsprefix_ = child_.prefix
# validate type LatestDropOffOverlayType
self.validate_LatestDropOffOverlayType(self.Type)
elif nodeName_ == 'Time':
sval_ = child_.text
dval_ = self.gds_parse_time(sval_)
self.Time = dval_
self.Time_nsprefix_ = child_.prefix
# end class LatestDropoffOverlayDetail
class Localization(GeneratedsSuper):
    """Identifies the representation of human-readable text.

    Holds a language code and an optional locale code, each serialized as a
    simple string child element.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, LanguageCode=None, LocaleCode=None, gds_collector_=None, **kwargs_):
        # Standard generateDS bookkeeping state.
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Child element values plus the namespace prefix captured for each.
        self.LanguageCode = LanguageCode
        self.LanguageCode_nsprefix_ = None
        self.LocaleCode = LocaleCode
        self.LocaleCode_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate Localization, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Localization)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = Localization.subclass or Localization
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_LanguageCode(self):
        return self.LanguageCode
    def set_LanguageCode(self, LanguageCode):
        self.LanguageCode = LanguageCode
    def get_LocaleCode(self):
        return self.LocaleCode
    def set_LocaleCode(self, LocaleCode):
        self.LocaleCode = LocaleCode
    def hasContent_(self):
        """Return True when at least one child element has a value."""
        return (
            self.LanguageCode is not None or
            self.LocaleCode is not None
        )
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Localization', pretty_print=True):
        """Serialize this object as an XML element onto *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Localization')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None and name_ == 'Localization':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Localization')
        if not self.hasContent_():
            # Empty element: self-closing form.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Localization', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Localization'):
        # Localization defines no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Localization', fromsubclass_=False, pretty_print=True):
        """Write the <LanguageCode> and <LocaleCode> children that are set."""
        eol_ = '\n' if pretty_print else ''
        for tag_ in ('LanguageCode', 'LocaleCode'):
            value_ = getattr(self, tag_)
            if value_ is None:
                continue
            prefix_ = getattr(self, tag_ + '_nsprefix_')
            namespaceprefix_ = prefix_ + ':' if (UseCapturedNS_ and prefix_) else ''
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(quote_xml(value_), input_name=tag_))
            outfile.write('<%s%s>%s</%s%s>%s' % (namespaceprefix_, tag_, encoded_, namespaceprefix_, tag_, eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree-style *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Localization defines no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one simple string child into the attribute of the same name."""
        if nodeName_ in ('LanguageCode', 'LocaleCode'):
            value_ = self.gds_parse_string(child_.text, node, nodeName_)
            value_ = self.gds_validate_string(value_, node, nodeName_)
            setattr(self, nodeName_, value_)
            setattr(self, nodeName_ + '_nsprefix_', child_.prefix)
# end class Localization
class LocationCapabilityDetail(GeneratedsSuper):
    """One service capability at a location: carrier code, service type and
    category, how possession transfers (dropoff / hold / redirect-to-hold),
    and the days of the week it applies to.

    Simple-type children are validated against their xsd enumerations; any
    violation is reported through ``gds_collector_`` rather than raised.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, CarrierCode=None, ServiceType=None, ServiceCategory=None, TransferOfPossessionType=None, DaysOfWeek=None, gds_collector_=None, **kwargs_):
        # Standard generateDS bookkeeping state.
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Enumerated simple types are validated immediately on assignment.
        self.CarrierCode = CarrierCode
        self.validate_CarrierCodeType(self.CarrierCode)
        self.CarrierCode_nsprefix_ = None
        self.ServiceType = ServiceType
        self.ServiceType_nsprefix_ = None
        self.ServiceCategory = ServiceCategory
        self.validate_ServiceCategoryType(self.ServiceCategory)
        self.ServiceCategory_nsprefix_ = None
        self.TransferOfPossessionType = TransferOfPossessionType
        self.validate_LocationTransferOfPossessionType(self.TransferOfPossessionType)
        self.TransferOfPossessionType_nsprefix_ = None
        # DaysOfWeek is a repeated element; default to a fresh list.
        if DaysOfWeek is None:
            self.DaysOfWeek = []
        else:
            self.DaysOfWeek = DaysOfWeek
        self.DaysOfWeek_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LocationCapabilityDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LocationCapabilityDetail.subclass:
            return LocationCapabilityDetail.subclass(*args_, **kwargs_)
        else:
            return LocationCapabilityDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generateDS-style accessors ---
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_CarrierCode(self):
        return self.CarrierCode
    def set_CarrierCode(self, CarrierCode):
        self.CarrierCode = CarrierCode
    def get_ServiceType(self):
        return self.ServiceType
    def set_ServiceType(self, ServiceType):
        self.ServiceType = ServiceType
    def get_ServiceCategory(self):
        return self.ServiceCategory
    def set_ServiceCategory(self, ServiceCategory):
        self.ServiceCategory = ServiceCategory
    def get_TransferOfPossessionType(self):
        return self.TransferOfPossessionType
    def set_TransferOfPossessionType(self, TransferOfPossessionType):
        self.TransferOfPossessionType = TransferOfPossessionType
    def get_DaysOfWeek(self):
        return self.DaysOfWeek
    def set_DaysOfWeek(self, DaysOfWeek):
        self.DaysOfWeek = DaysOfWeek
    def add_DaysOfWeek(self, value):
        self.DaysOfWeek.append(value)
    def insert_DaysOfWeek_at(self, index, value):
        self.DaysOfWeek.insert(index, value)
    def replace_DaysOfWeek_at(self, index, value):
        self.DaysOfWeek[index] = value
    def validate_CarrierCodeType(self, value):
        """Report via gds_collector_ if *value* breaks the CarrierCodeType enum.

        Returns False on a type error, True/False (result) on enum mismatch;
        skipped entirely when validation is off or no collector is attached.
        """
        result = True
        # Validate type CarrierCodeType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['FDXC', 'FDXE', 'FDXG', 'FXCC', 'FXFR', 'FXSP']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on CarrierCodeType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_ServiceCategoryType(self, value):
        """Report via gds_collector_ if *value* breaks the ServiceCategoryType enum."""
        result = True
        # Validate type ServiceCategoryType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['EXPRESS_FREIGHT', 'EXPRESS_PARCEL', 'GROUND_HOME_DELIVERY']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ServiceCategoryType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_LocationTransferOfPossessionType(self, value):
        """Report via gds_collector_ if *value* breaks the LocationTransferOfPossessionType enum."""
        result = True
        # Validate type LocationTransferOfPossessionType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['DROPOFF', 'HOLD_AT_LOCATION', 'REDIRECT_TO_HOLD_AT_LOCATION']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationTransferOfPossessionType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_DayOfWeekType(self, value):
        """Report via gds_collector_ if *value* breaks the DayOfWeekType enum."""
        result = True
        # Validate type DayOfWeekType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['FRI', 'MON', 'SAT', 'SUN', 'THU', 'TUE', 'WED']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on DayOfWeekType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        # True when any child element would be serialized (non-None scalar
        # or non-empty DaysOfWeek list).
        if (
            self.CarrierCode is not None or
            self.ServiceType is not None or
            self.ServiceCategory is not None or
            self.TransferOfPossessionType is not None or
            self.DaysOfWeek
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationCapabilityDetail', pretty_print=True):
        """Serialize this object as an XML element onto *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationCapabilityDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LocationCapabilityDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationCapabilityDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationCapabilityDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Empty element: self-closing form.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationCapabilityDetail'):
        # LocationCapabilityDetail defines no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationCapabilityDetail', fromsubclass_=False, pretty_print=True):
        """Write each non-None scalar child and every DaysOfWeek entry."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CarrierCode is not None:
            namespaceprefix_ = self.CarrierCode_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierCode_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCarrierCode>%s</%sCarrierCode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierCode), input_name='CarrierCode')), namespaceprefix_ , eol_))
        if self.ServiceType is not None:
            namespaceprefix_ = self.ServiceType_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceType_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sServiceType>%s</%sServiceType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceType), input_name='ServiceType')), namespaceprefix_ , eol_))
        if self.ServiceCategory is not None:
            namespaceprefix_ = self.ServiceCategory_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceCategory_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sServiceCategory>%s</%sServiceCategory>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceCategory), input_name='ServiceCategory')), namespaceprefix_ , eol_))
        if self.TransferOfPossessionType is not None:
            namespaceprefix_ = self.TransferOfPossessionType_nsprefix_ + ':' if (UseCapturedNS_ and self.TransferOfPossessionType_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTransferOfPossessionType>%s</%sTransferOfPossessionType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TransferOfPossessionType), input_name='TransferOfPossessionType')), namespaceprefix_ , eol_))
        for DaysOfWeek_ in self.DaysOfWeek:
            namespaceprefix_ = self.DaysOfWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DaysOfWeek_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDaysOfWeek>%s</%sDaysOfWeek>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(DaysOfWeek_), input_name='DaysOfWeek')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree-style *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # LocationCapabilityDetail defines no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element into the matching attribute, validating
        enumerated types after assignment."""
        if nodeName_ == 'CarrierCode':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'CarrierCode')
            value_ = self.gds_validate_string(value_, node, 'CarrierCode')
            self.CarrierCode = value_
            self.CarrierCode_nsprefix_ = child_.prefix
            # validate type CarrierCodeType
            self.validate_CarrierCodeType(self.CarrierCode)
        elif nodeName_ == 'ServiceType':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ServiceType')
            value_ = self.gds_validate_string(value_, node, 'ServiceType')
            self.ServiceType = value_
            self.ServiceType_nsprefix_ = child_.prefix
        elif nodeName_ == 'ServiceCategory':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ServiceCategory')
            value_ = self.gds_validate_string(value_, node, 'ServiceCategory')
            self.ServiceCategory = value_
            self.ServiceCategory_nsprefix_ = child_.prefix
            # validate type ServiceCategoryType
            self.validate_ServiceCategoryType(self.ServiceCategory)
        elif nodeName_ == 'TransferOfPossessionType':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'TransferOfPossessionType')
            value_ = self.gds_validate_string(value_, node, 'TransferOfPossessionType')
            self.TransferOfPossessionType = value_
            self.TransferOfPossessionType_nsprefix_ = child_.prefix
            # validate type LocationTransferOfPossessionType
            self.validate_LocationTransferOfPossessionType(self.TransferOfPossessionType)
        elif nodeName_ == 'DaysOfWeek':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'DaysOfWeek')
            value_ = self.gds_validate_string(value_, node, 'DaysOfWeek')
            self.DaysOfWeek.append(value_)
            self.DaysOfWeek_nsprefix_ = child_.prefix
            # validate type DayOfWeekType
            self.validate_DayOfWeekType(self.DaysOfWeek[-1])
# end class LocationCapabilityDetail
class LocationContactAndAddress(GeneratedsSuper):
    """Bundles a Contact, an Address, and an AddressAncillaryDetail for a
    location; each child is a complex element that serializes itself."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Contact=None, Address=None, AddressAncillaryDetail=None, gds_collector_=None, **kwargs_):
        # Standard generateDS bookkeeping state.
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Complex child objects plus the namespace prefix captured for each.
        self.Contact = Contact
        self.Contact_nsprefix_ = None
        self.Address = Address
        self.Address_nsprefix_ = None
        self.AddressAncillaryDetail = AddressAncillaryDetail
        self.AddressAncillaryDetail_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate LocationContactAndAddress, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LocationContactAndAddress)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = LocationContactAndAddress.subclass or LocationContactAndAddress
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Contact(self):
        return self.Contact
    def set_Contact(self, Contact):
        self.Contact = Contact
    def get_Address(self):
        return self.Address
    def set_Address(self, Address):
        self.Address = Address
    def get_AddressAncillaryDetail(self):
        return self.AddressAncillaryDetail
    def set_AddressAncillaryDetail(self, AddressAncillaryDetail):
        self.AddressAncillaryDetail = AddressAncillaryDetail
    def hasContent_(self):
        """Return True when at least one child element has a value."""
        return (
            self.Contact is not None or
            self.Address is not None or
            self.AddressAncillaryDetail is not None
        )
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationContactAndAddress', pretty_print=True):
        """Serialize this object as an XML element onto *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationContactAndAddress')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None and name_ == 'LocationContactAndAddress':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationContactAndAddress')
        if not self.hasContent_():
            # Empty element: self-closing form.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationContactAndAddress', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationContactAndAddress'):
        # LocationContactAndAddress defines no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationContactAndAddress', fromsubclass_=False, pretty_print=True):
        """Delegate serialization to each complex child that is set."""
        for tag_ in ('Contact', 'Address', 'AddressAncillaryDetail'):
            member_ = getattr(self, tag_)
            if member_ is None:
                continue
            prefix_ = getattr(self, tag_ + '_nsprefix_')
            namespaceprefix_ = prefix_ + ':' if (UseCapturedNS_ and prefix_) else ''
            member_.export(outfile, level, namespaceprefix_, namespacedef_='', name_=tag_, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree-style *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # LocationContactAndAddress defines no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Deserialize a complex child element into the matching attribute."""
        child_classes_ = {
            'Contact': Contact,
            'Address': Address,
            'AddressAncillaryDetail': AddressAncillaryDetail,
        }
        klass_ = child_classes_.get(nodeName_)
        if klass_ is not None:
            obj_ = klass_.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
# end class LocationContactAndAddress
class LocationDetail(GeneratedsSuper):
"""Describes an individual location providing a set of customer service
features."""
__hash__ = GeneratedsSuper.__hash__
subclass = None
superclass = None
def __init__(self, LocationId=None, StoreNumber=None, LocationContactAndAddress=None, SpecialInstructions=None, TimeZoneOffset=None, LocationType=None, LocationTypeForDisplay=None, Attributes=None, LocationCapabilities=None, PackageMaximumLimits=None, ClearanceLocationDetail=None, ServicingLocationDetails=None, AcceptedCurrency=None, LocationHolidays=None, MapUrl=None, EntityId=None, NormalHours=None, ExceptionalHours=None, HoursForEffectiveDate=None, CarrierDetails=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ns_prefix_ = None
self.LocationId = LocationId
self.LocationId_nsprefix_ = None
self.StoreNumber = StoreNumber
self.StoreNumber_nsprefix_ = None
self.LocationContactAndAddress = LocationContactAndAddress
self.LocationContactAndAddress_nsprefix_ = None
self.SpecialInstructions = SpecialInstructions
self.SpecialInstructions_nsprefix_ = None
self.TimeZoneOffset = TimeZoneOffset
self.TimeZoneOffset_nsprefix_ = None
self.LocationType = LocationType
self.validate_FedExLocationType(self.LocationType)
self.LocationType_nsprefix_ = None
self.LocationTypeForDisplay = LocationTypeForDisplay
self.LocationTypeForDisplay_nsprefix_ = None
if Attributes is None:
self.Attributes = []
else:
self.Attributes = Attributes
self.Attributes_nsprefix_ = None
if LocationCapabilities is None:
self.LocationCapabilities = []
else:
self.LocationCapabilities = LocationCapabilities
self.LocationCapabilities_nsprefix_ = None
self.PackageMaximumLimits = PackageMaximumLimits
self.PackageMaximumLimits_nsprefix_ = None
self.ClearanceLocationDetail = ClearanceLocationDetail
self.ClearanceLocationDetail_nsprefix_ = None
if ServicingLocationDetails is None:
self.ServicingLocationDetails = []
else:
self.ServicingLocationDetails = ServicingLocationDetails
self.ServicingLocationDetails_nsprefix_ = None
self.AcceptedCurrency = AcceptedCurrency
self.AcceptedCurrency_nsprefix_ = None
if LocationHolidays is None:
self.LocationHolidays = []
else:
self.LocationHolidays = LocationHolidays
self.LocationHolidays_nsprefix_ = None
self.MapUrl = MapUrl
self.MapUrl_nsprefix_ = None
self.EntityId = EntityId
self.EntityId_nsprefix_ = None
if NormalHours is None:
self.NormalHours = []
else:
self.NormalHours = NormalHours
self.NormalHours_nsprefix_ = None
if ExceptionalHours is None:
self.ExceptionalHours = []
else:
self.ExceptionalHours = ExceptionalHours
self.ExceptionalHours_nsprefix_ = None
if HoursForEffectiveDate is None:
self.HoursForEffectiveDate = []
else:
self.HoursForEffectiveDate = HoursForEffectiveDate
self.HoursForEffectiveDate_nsprefix_ = None
if CarrierDetails is None:
self.CarrierDetails = []
else:
self.CarrierDetails = CarrierDetails
self.CarrierDetails_nsprefix_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, LocationDetail)
if subclass is not None:
return subclass(*args_, **kwargs_)
if LocationDetail.subclass:
return LocationDetail.subclass(*args_, **kwargs_)
else:
return LocationDetail(*args_, **kwargs_)
factory = staticmethod(factory)
    # --- generateDS-style accessors (get/set, plus add/insert/replace for
    # repeated elements). Purely mechanical delegation to the attributes
    # assigned in __init__.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_LocationId(self):
        return self.LocationId
    def set_LocationId(self, LocationId):
        self.LocationId = LocationId
    def get_StoreNumber(self):
        return self.StoreNumber
    def set_StoreNumber(self, StoreNumber):
        self.StoreNumber = StoreNumber
    def get_LocationContactAndAddress(self):
        return self.LocationContactAndAddress
    def set_LocationContactAndAddress(self, LocationContactAndAddress):
        self.LocationContactAndAddress = LocationContactAndAddress
    def get_SpecialInstructions(self):
        return self.SpecialInstructions
    def set_SpecialInstructions(self, SpecialInstructions):
        self.SpecialInstructions = SpecialInstructions
    def get_TimeZoneOffset(self):
        return self.TimeZoneOffset
    def set_TimeZoneOffset(self, TimeZoneOffset):
        self.TimeZoneOffset = TimeZoneOffset
    def get_LocationType(self):
        return self.LocationType
    def set_LocationType(self, LocationType):
        self.LocationType = LocationType
    def get_LocationTypeForDisplay(self):
        return self.LocationTypeForDisplay
    def set_LocationTypeForDisplay(self, LocationTypeForDisplay):
        self.LocationTypeForDisplay = LocationTypeForDisplay
    def get_Attributes(self):
        return self.Attributes
    def set_Attributes(self, Attributes):
        self.Attributes = Attributes
    def add_Attributes(self, value):
        self.Attributes.append(value)
    def insert_Attributes_at(self, index, value):
        self.Attributes.insert(index, value)
    def replace_Attributes_at(self, index, value):
        self.Attributes[index] = value
    def get_LocationCapabilities(self):
        return self.LocationCapabilities
    def set_LocationCapabilities(self, LocationCapabilities):
        self.LocationCapabilities = LocationCapabilities
    def add_LocationCapabilities(self, value):
        self.LocationCapabilities.append(value)
    def insert_LocationCapabilities_at(self, index, value):
        self.LocationCapabilities.insert(index, value)
    def replace_LocationCapabilities_at(self, index, value):
        self.LocationCapabilities[index] = value
    def get_PackageMaximumLimits(self):
        return self.PackageMaximumLimits
    def set_PackageMaximumLimits(self, PackageMaximumLimits):
        self.PackageMaximumLimits = PackageMaximumLimits
    def get_ClearanceLocationDetail(self):
        return self.ClearanceLocationDetail
    def set_ClearanceLocationDetail(self, ClearanceLocationDetail):
        self.ClearanceLocationDetail = ClearanceLocationDetail
    def get_ServicingLocationDetails(self):
        return self.ServicingLocationDetails
    def set_ServicingLocationDetails(self, ServicingLocationDetails):
        self.ServicingLocationDetails = ServicingLocationDetails
    def add_ServicingLocationDetails(self, value):
        self.ServicingLocationDetails.append(value)
    def insert_ServicingLocationDetails_at(self, index, value):
        self.ServicingLocationDetails.insert(index, value)
    def replace_ServicingLocationDetails_at(self, index, value):
        self.ServicingLocationDetails[index] = value
    def get_AcceptedCurrency(self):
        return self.AcceptedCurrency
    def set_AcceptedCurrency(self, AcceptedCurrency):
        self.AcceptedCurrency = AcceptedCurrency
    def get_LocationHolidays(self):
        return self.LocationHolidays
    def set_LocationHolidays(self, LocationHolidays):
        self.LocationHolidays = LocationHolidays
    def add_LocationHolidays(self, value):
        self.LocationHolidays.append(value)
    def insert_LocationHolidays_at(self, index, value):
        self.LocationHolidays.insert(index, value)
    def replace_LocationHolidays_at(self, index, value):
        self.LocationHolidays[index] = value
    def get_MapUrl(self):
        return self.MapUrl
    def set_MapUrl(self, MapUrl):
        self.MapUrl = MapUrl
    def get_EntityId(self):
        return self.EntityId
    def set_EntityId(self, EntityId):
        self.EntityId = EntityId
    def get_NormalHours(self):
        return self.NormalHours
    def set_NormalHours(self, NormalHours):
        self.NormalHours = NormalHours
    def add_NormalHours(self, value):
        self.NormalHours.append(value)
    def insert_NormalHours_at(self, index, value):
        self.NormalHours.insert(index, value)
    def replace_NormalHours_at(self, index, value):
        self.NormalHours[index] = value
    def get_ExceptionalHours(self):
        return self.ExceptionalHours
    def set_ExceptionalHours(self, ExceptionalHours):
        self.ExceptionalHours = ExceptionalHours
    def add_ExceptionalHours(self, value):
        self.ExceptionalHours.append(value)
    def insert_ExceptionalHours_at(self, index, value):
        self.ExceptionalHours.insert(index, value)
    def replace_ExceptionalHours_at(self, index, value):
        self.ExceptionalHours[index] = value
    def get_HoursForEffectiveDate(self):
        return self.HoursForEffectiveDate
    def set_HoursForEffectiveDate(self, HoursForEffectiveDate):
        self.HoursForEffectiveDate = HoursForEffectiveDate
    def add_HoursForEffectiveDate(self, value):
        self.HoursForEffectiveDate.append(value)
    def insert_HoursForEffectiveDate_at(self, index, value):
        self.HoursForEffectiveDate.insert(index, value)
    def replace_HoursForEffectiveDate_at(self, index, value):
        self.HoursForEffectiveDate[index] = value
    def get_CarrierDetails(self):
        return self.CarrierDetails
    def set_CarrierDetails(self, CarrierDetails):
        self.CarrierDetails = CarrierDetails
    def add_CarrierDetails(self, value):
        self.CarrierDetails.append(value)
    def insert_CarrierDetails_at(self, index, value):
        self.CarrierDetails.insert(index, value)
    def replace_CarrierDetails_at(self, index, value):
        self.CarrierDetails[index] = value
def validate_FedExLocationType(self, value):
result = True
# Validate type FedExLocationType, a restriction on xs:string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['FEDEX_AUTHORIZED_SHIP_CENTER', 'FEDEX_EXPRESS_STATION', 'FEDEX_FACILITY', 'FEDEX_FREIGHT_SERVICE_CENTER', 'FEDEX_GROUND_TERMINAL', 'FEDEX_HOME_DELIVERY_STATION', 'FEDEX_OFFICE', 'FEDEX_ONSITE', 'FEDEX_SELF_SERVICE_LOCATION', 'FEDEX_SHIPSITE', 'FEDEX_SHIP_AND_GET', 'FEDEX_SMART_POST_HUB']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on FedExLocationType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
return result
def validate_LocationAttributesType(self, value):
result = True
# Validate type LocationAttributesType, a restriction on xs:string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['ACCEPTS_CASH', 'ALREADY_OPEN', 'CLEARANCE_SERVICES', 'COPY_AND_PRINT_SERVICES', 'DANGEROUS_GOODS_SERVICES', 'DIRECT_MAIL_SERVICES', 'DOMESTIC_SHIPPING_SERVICES', 'DROP_BOX', 'INTERNATIONAL_SHIPPING_SERVICES', 'LOCATION_IS_IN_AIRPORT', 'NOTARY_SERVICES', 'OBSERVES_DAY_LIGHT_SAVING_TIMES', 'OPEN_TWENTY_FOUR_HOURS', 'PACKAGING_SUPPLIES', 'PACK_AND_SHIP', 'PASSPORT_PHOTO_SERVICES', 'RETURNS_SERVICES', 'SIGNS_AND_BANNERS_SERVICE', 'SONY_PICTURE_STATION']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationAttributesType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
return result
def hasContent_(self):
if (
self.LocationId is not None or
self.StoreNumber is not None or
self.LocationContactAndAddress is not None or
self.SpecialInstructions is not None or
self.TimeZoneOffset is not None or
self.LocationType is not None or
self.LocationTypeForDisplay is not None or
self.Attributes or
self.LocationCapabilities or
self.PackageMaximumLimits is not None or
self.ClearanceLocationDetail is not None or
self.ServicingLocationDetails or
self.AcceptedCurrency is not None or
self.LocationHolidays or
self.MapUrl is not None or
self.EntityId is not None or
self.NormalHours or
self.ExceptionalHours or
self.HoursForEffectiveDate or
self.CarrierDetails
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationDetail', pretty_print=True):
        """Write this object to *outfile* as an XML element named *name_*.

        Emits the opening tag (with any registered namespace definition),
        delegates attributes to exportAttributes and child elements to
        exportChildren, then writes either a matching close tag or a
        self-closing tag when hasContent_() is False.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured during parsing when this element was
        # originally read under a substituted name.
        if self.original_tagname_ is not None and name_ == 'LocationDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationDetail'):
        """No XML attributes are defined for LocationDetail; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationDetail', fromsubclass_=False, pretty_print=True):
        """Write all populated child elements to *outfile* in schema order.

        Simple string children are escaped and written inline; complex
        children delegate to their own export(); repeating children emit one
        element per list entry.
        """
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.LocationId is not None:
            namespaceprefix_ = self.LocationId_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationId_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocationId>%s</%sLocationId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LocationId), input_name='LocationId')), namespaceprefix_ , eol_))
        if self.StoreNumber is not None:
            namespaceprefix_ = self.StoreNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.StoreNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sStoreNumber>%s</%sStoreNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StoreNumber), input_name='StoreNumber')), namespaceprefix_ , eol_))
        if self.LocationContactAndAddress is not None:
            namespaceprefix_ = self.LocationContactAndAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationContactAndAddress_nsprefix_) else ''
            self.LocationContactAndAddress.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LocationContactAndAddress', pretty_print=pretty_print)
        if self.SpecialInstructions is not None:
            namespaceprefix_ = self.SpecialInstructions_nsprefix_ + ':' if (UseCapturedNS_ and self.SpecialInstructions_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSpecialInstructions>%s</%sSpecialInstructions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SpecialInstructions), input_name='SpecialInstructions')), namespaceprefix_ , eol_))
        if self.TimeZoneOffset is not None:
            namespaceprefix_ = self.TimeZoneOffset_nsprefix_ + ':' if (UseCapturedNS_ and self.TimeZoneOffset_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimeZoneOffset>%s</%sTimeZoneOffset>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TimeZoneOffset), input_name='TimeZoneOffset')), namespaceprefix_ , eol_))
        if self.LocationType is not None:
            namespaceprefix_ = self.LocationType_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationType_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocationType>%s</%sLocationType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LocationType), input_name='LocationType')), namespaceprefix_ , eol_))
        if self.LocationTypeForDisplay is not None:
            namespaceprefix_ = self.LocationTypeForDisplay_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationTypeForDisplay_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocationTypeForDisplay>%s</%sLocationTypeForDisplay>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LocationTypeForDisplay), input_name='LocationTypeForDisplay')), namespaceprefix_ , eol_))
        for Attributes_ in self.Attributes:
            namespaceprefix_ = self.Attributes_nsprefix_ + ':' if (UseCapturedNS_ and self.Attributes_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAttributes>%s</%sAttributes>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Attributes_), input_name='Attributes')), namespaceprefix_ , eol_))
        for LocationCapabilities_ in self.LocationCapabilities:
            namespaceprefix_ = self.LocationCapabilities_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationCapabilities_nsprefix_) else ''
            LocationCapabilities_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LocationCapabilities', pretty_print=pretty_print)
        if self.PackageMaximumLimits is not None:
            namespaceprefix_ = self.PackageMaximumLimits_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageMaximumLimits_nsprefix_) else ''
            self.PackageMaximumLimits.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PackageMaximumLimits', pretty_print=pretty_print)
        if self.ClearanceLocationDetail is not None:
            namespaceprefix_ = self.ClearanceLocationDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ClearanceLocationDetail_nsprefix_) else ''
            self.ClearanceLocationDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ClearanceLocationDetail', pretty_print=pretty_print)
        for ServicingLocationDetails_ in self.ServicingLocationDetails:
            namespaceprefix_ = self.ServicingLocationDetails_nsprefix_ + ':' if (UseCapturedNS_ and self.ServicingLocationDetails_nsprefix_) else ''
            ServicingLocationDetails_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ServicingLocationDetails', pretty_print=pretty_print)
        if self.AcceptedCurrency is not None:
            namespaceprefix_ = self.AcceptedCurrency_nsprefix_ + ':' if (UseCapturedNS_ and self.AcceptedCurrency_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAcceptedCurrency>%s</%sAcceptedCurrency>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AcceptedCurrency), input_name='AcceptedCurrency')), namespaceprefix_ , eol_))
        for LocationHolidays_ in self.LocationHolidays:
            namespaceprefix_ = self.LocationHolidays_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationHolidays_nsprefix_) else ''
            LocationHolidays_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LocationHolidays', pretty_print=pretty_print)
        if self.MapUrl is not None:
            namespaceprefix_ = self.MapUrl_nsprefix_ + ':' if (UseCapturedNS_ and self.MapUrl_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMapUrl>%s</%sMapUrl>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MapUrl), input_name='MapUrl')), namespaceprefix_ , eol_))
        if self.EntityId is not None:
            namespaceprefix_ = self.EntityId_nsprefix_ + ':' if (UseCapturedNS_ and self.EntityId_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sEntityId>%s</%sEntityId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EntityId), input_name='EntityId')), namespaceprefix_ , eol_))
        for NormalHours_ in self.NormalHours:
            namespaceprefix_ = self.NormalHours_nsprefix_ + ':' if (UseCapturedNS_ and self.NormalHours_nsprefix_) else ''
            NormalHours_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NormalHours', pretty_print=pretty_print)
        for ExceptionalHours_ in self.ExceptionalHours:
            namespaceprefix_ = self.ExceptionalHours_nsprefix_ + ':' if (UseCapturedNS_ and self.ExceptionalHours_nsprefix_) else ''
            ExceptionalHours_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ExceptionalHours', pretty_print=pretty_print)
        for HoursForEffectiveDate_ in self.HoursForEffectiveDate:
            namespaceprefix_ = self.HoursForEffectiveDate_nsprefix_ + ':' if (UseCapturedNS_ and self.HoursForEffectiveDate_nsprefix_) else ''
            HoursForEffectiveDate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='HoursForEffectiveDate', pretty_print=pretty_print)
        for CarrierDetails_ in self.CarrierDetails:
            namespaceprefix_ = self.CarrierDetails_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierDetails_nsprefix_) else ''
            CarrierDetails_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CarrierDetails', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node* and return self.

        Captures the node's namespace prefix, processes attributes, then
        dispatches each child element to buildChildren by its local tag name.
        """
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for LocationDetail; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element *child_* (local name *nodeName_*) into self.

        Simple string children are parsed/validated and stored; complex
        children are built via their class factory; repeating children are
        appended to their list attribute.  Enumerated values are checked
        against their XSD restriction after assignment.
        """
        if nodeName_ == 'LocationId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocationId')
            value_ = self.gds_validate_string(value_, node, 'LocationId')
            self.LocationId = value_
            self.LocationId_nsprefix_ = child_.prefix
        elif nodeName_ == 'StoreNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'StoreNumber')
            value_ = self.gds_validate_string(value_, node, 'StoreNumber')
            self.StoreNumber = value_
            self.StoreNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'LocationContactAndAddress':
            obj_ = LocationContactAndAddress.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.LocationContactAndAddress = obj_
            obj_.original_tagname_ = 'LocationContactAndAddress'
        elif nodeName_ == 'SpecialInstructions':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'SpecialInstructions')
            value_ = self.gds_validate_string(value_, node, 'SpecialInstructions')
            self.SpecialInstructions = value_
            self.SpecialInstructions_nsprefix_ = child_.prefix
        elif nodeName_ == 'TimeZoneOffset':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'TimeZoneOffset')
            value_ = self.gds_validate_string(value_, node, 'TimeZoneOffset')
            self.TimeZoneOffset = value_
            self.TimeZoneOffset_nsprefix_ = child_.prefix
        elif nodeName_ == 'LocationType':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocationType')
            value_ = self.gds_validate_string(value_, node, 'LocationType')
            self.LocationType = value_
            self.LocationType_nsprefix_ = child_.prefix
            # validate type FedExLocationType
            self.validate_FedExLocationType(self.LocationType)
        elif nodeName_ == 'LocationTypeForDisplay':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocationTypeForDisplay')
            value_ = self.gds_validate_string(value_, node, 'LocationTypeForDisplay')
            self.LocationTypeForDisplay = value_
            self.LocationTypeForDisplay_nsprefix_ = child_.prefix
        elif nodeName_ == 'Attributes':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Attributes')
            value_ = self.gds_validate_string(value_, node, 'Attributes')
            self.Attributes.append(value_)
            self.Attributes_nsprefix_ = child_.prefix
            # validate type LocationAttributesType
            self.validate_LocationAttributesType(self.Attributes[-1])
        elif nodeName_ == 'LocationCapabilities':
            obj_ = LocationCapabilityDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.LocationCapabilities.append(obj_)
            obj_.original_tagname_ = 'LocationCapabilities'
        elif nodeName_ == 'PackageMaximumLimits':
            obj_ = LocationPackageLimitsDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.PackageMaximumLimits = obj_
            obj_.original_tagname_ = 'PackageMaximumLimits'
        elif nodeName_ == 'ClearanceLocationDetail':
            obj_ = ClearanceLocationDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ClearanceLocationDetail = obj_
            obj_.original_tagname_ = 'ClearanceLocationDetail'
        elif nodeName_ == 'ServicingLocationDetails':
            obj_ = LocationIdentificationDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ServicingLocationDetails.append(obj_)
            obj_.original_tagname_ = 'ServicingLocationDetails'
        elif nodeName_ == 'AcceptedCurrency':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AcceptedCurrency')
            value_ = self.gds_validate_string(value_, node, 'AcceptedCurrency')
            self.AcceptedCurrency = value_
            self.AcceptedCurrency_nsprefix_ = child_.prefix
        elif nodeName_ == 'LocationHolidays':
            obj_ = Holiday.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.LocationHolidays.append(obj_)
            obj_.original_tagname_ = 'LocationHolidays'
        elif nodeName_ == 'MapUrl':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'MapUrl')
            value_ = self.gds_validate_string(value_, node, 'MapUrl')
            self.MapUrl = value_
            self.MapUrl_nsprefix_ = child_.prefix
        elif nodeName_ == 'EntityId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'EntityId')
            value_ = self.gds_validate_string(value_, node, 'EntityId')
            self.EntityId = value_
            self.EntityId_nsprefix_ = child_.prefix
        elif nodeName_ == 'NormalHours':
            obj_ = LocationHours.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.NormalHours.append(obj_)
            obj_.original_tagname_ = 'NormalHours'
        elif nodeName_ == 'ExceptionalHours':
            obj_ = LocationHours.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ExceptionalHours.append(obj_)
            obj_.original_tagname_ = 'ExceptionalHours'
        elif nodeName_ == 'HoursForEffectiveDate':
            obj_ = LocationHours.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.HoursForEffectiveDate.append(obj_)
            obj_.original_tagname_ = 'HoursForEffectiveDate'
        elif nodeName_ == 'CarrierDetails':
            obj_ = CarrierDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.CarrierDetails.append(obj_)
            obj_.original_tagname_ = 'CarrierDetails'
# end class LocationDetail
class LocationHours(GeneratedsSuper):
    """Specifies the location hours for a location."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, DayofWeek=None, OperationalHours=None, Hours=None, gds_collector_=None, **kwargs_):
        """Initialize with optional DayofWeek, OperationalHours and Hours.

        DayofWeek and OperationalHours are validated against their XSD
        enumerations at construction time; Hours defaults to a new list.
        """
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.DayofWeek = DayofWeek
        self.validate_DayOfWeekType(self.DayofWeek)
        self.DayofWeek_nsprefix_ = None
        self.OperationalHours = OperationalHours
        self.validate_OperationalHoursType(self.OperationalHours)
        self.OperationalHours_nsprefix_ = None
        if Hours is None:
            self.Hours = []
        else:
            self.Hours = Hours
        self.Hours_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create a LocationHours (or registered subclass) instance."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LocationHours)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LocationHours.subclass:
            return LocationHours.subclass(*args_, **kwargs_)
        else:
            return LocationHours(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        """Return the captured namespace prefix for this element."""
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        """Set the namespace prefix used when exporting this element."""
        self.ns_prefix_ = ns_prefix
    def get_DayofWeek(self):
        """Return the DayofWeek value."""
        return self.DayofWeek
    def set_DayofWeek(self, DayofWeek):
        """Set the DayofWeek value (not re-validated here)."""
        self.DayofWeek = DayofWeek
    def get_OperationalHours(self):
        """Return the OperationalHours value."""
        return self.OperationalHours
    def set_OperationalHours(self, OperationalHours):
        """Set the OperationalHours value (not re-validated here)."""
        self.OperationalHours = OperationalHours
    def get_Hours(self):
        """Return the list of Hours child objects."""
        return self.Hours
    def set_Hours(self, Hours):
        """Replace the whole Hours list."""
        self.Hours = Hours
    def add_Hours(self, value):
        """Append *value* to the Hours list."""
        self.Hours.append(value)
    def insert_Hours_at(self, index, value):
        """Insert *value* into the Hours list at *index*."""
        self.Hours.insert(index, value)
    def replace_Hours_at(self, index, value):
        """Replace the Hours element at *index* with *value*."""
        self.Hours[index] = value
    def validate_DayOfWeekType(self, value):
        """Validate *value* against the DayOfWeekType XSD enumeration.

        Returns True when acceptable (or None / validation disabled / no
        collector); otherwise records a message and returns False.
        """
        result = True
        # Validate type DayOfWeekType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            # (Removed dead no-op "value = value" left by the code generator.)
            enumerations = ['FRI', 'MON', 'SAT', 'SUN', 'THU', 'TUE', 'WED']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on DayOfWeekType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_OperationalHoursType(self, value):
        """Validate *value* against the OperationalHoursType XSD enumeration.

        Returns True when acceptable (or None / validation disabled / no
        collector); otherwise records a message and returns False.
        """
        result = True
        # Validate type OperationalHoursType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            # (Removed dead no-op "value = value" left by the code generator.)
            enumerations = ['CLOSED_ALL_DAY', 'OPEN_ALL_DAY', 'OPEN_BY_HOURS']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on OperationalHoursType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when any child element of this object is populated."""
        if (
            self.DayofWeek is not None or
            self.OperationalHours is not None or
            self.Hours
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationHours', pretty_print=True):
        """Write this object to *outfile* as an XML element named *name_*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationHours')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LocationHours':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationHours')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationHours', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationHours'):
        """No XML attributes are defined for LocationHours; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationHours', fromsubclass_=False, pretty_print=True):
        """Write all populated child elements to *outfile* in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.DayofWeek is not None:
            namespaceprefix_ = self.DayofWeek_nsprefix_ + ':' if (UseCapturedNS_ and self.DayofWeek_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDayofWeek>%s</%sDayofWeek>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DayofWeek), input_name='DayofWeek')), namespaceprefix_ , eol_))
        if self.OperationalHours is not None:
            namespaceprefix_ = self.OperationalHours_nsprefix_ + ':' if (UseCapturedNS_ and self.OperationalHours_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sOperationalHours>%s</%sOperationalHours>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OperationalHours), input_name='OperationalHours')), namespaceprefix_ , eol_))
        for Hours_ in self.Hours:
            namespaceprefix_ = self.Hours_nsprefix_ + ':' if (UseCapturedNS_ and self.Hours_nsprefix_) else ''
            Hours_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Hours', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for LocationHours; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element *child_* (local name *nodeName_*) into self."""
        if nodeName_ == 'DayofWeek':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'DayofWeek')
            value_ = self.gds_validate_string(value_, node, 'DayofWeek')
            self.DayofWeek = value_
            self.DayofWeek_nsprefix_ = child_.prefix
            # validate type DayOfWeekType
            self.validate_DayOfWeekType(self.DayofWeek)
        elif nodeName_ == 'OperationalHours':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'OperationalHours')
            value_ = self.gds_validate_string(value_, node, 'OperationalHours')
            self.OperationalHours = value_
            self.OperationalHours_nsprefix_ = child_.prefix
            # validate type OperationalHoursType
            self.validate_OperationalHoursType(self.OperationalHours)
        elif nodeName_ == 'Hours':
            obj_ = TimeRange.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Hours.append(obj_)
            obj_.original_tagname_ = 'Hours'
# end class LocationHours
class LocationIdentificationDetail(GeneratedsSuper):
    """Identifies a location by its FedEx location Type, Id and Number."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Type=None, Id=None, Number=None, gds_collector_=None, **kwargs_):
        """Initialize with optional Type, Id and Number.

        Type is validated against the FedExLocationType enumeration at
        construction time.
        """
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Type = Type
        self.validate_FedExLocationType(self.Type)
        self.Type_nsprefix_ = None
        self.Id = Id
        self.Id_nsprefix_ = None
        self.Number = Number
        self.Number_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create a LocationIdentificationDetail (or registered subclass)."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LocationIdentificationDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LocationIdentificationDetail.subclass:
            return LocationIdentificationDetail.subclass(*args_, **kwargs_)
        else:
            return LocationIdentificationDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        """Return the captured namespace prefix for this element."""
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        """Set the namespace prefix used when exporting this element."""
        self.ns_prefix_ = ns_prefix
    def get_Type(self):
        """Return the Type value."""
        return self.Type
    def set_Type(self, Type):
        """Set the Type value (not re-validated here)."""
        self.Type = Type
    def get_Id(self):
        """Return the Id value."""
        return self.Id
    def set_Id(self, Id):
        """Set the Id value."""
        self.Id = Id
    def get_Number(self):
        """Return the Number value."""
        return self.Number
    def set_Number(self, Number):
        """Set the Number value."""
        self.Number = Number
    def validate_FedExLocationType(self, value):
        """Validate *value* against the FedExLocationType XSD enumeration.

        Returns True when acceptable (or None / validation disabled / no
        collector); otherwise records a message and returns False.
        """
        result = True
        # Validate type FedExLocationType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            # (Removed dead no-op "value = value" left by the code generator.)
            enumerations = ['FEDEX_AUTHORIZED_SHIP_CENTER', 'FEDEX_EXPRESS_STATION', 'FEDEX_FACILITY', 'FEDEX_FREIGHT_SERVICE_CENTER', 'FEDEX_GROUND_TERMINAL', 'FEDEX_HOME_DELIVERY_STATION', 'FEDEX_OFFICE', 'FEDEX_ONSITE', 'FEDEX_SELF_SERVICE_LOCATION', 'FEDEX_SHIPSITE', 'FEDEX_SHIP_AND_GET', 'FEDEX_SMART_POST_HUB']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on FedExLocationType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when any child element of this object is populated."""
        if (
            self.Type is not None or
            self.Id is not None or
            self.Number is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationIdentificationDetail', pretty_print=True):
        """Write this object to *outfile* as an XML element named *name_*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationIdentificationDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LocationIdentificationDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationIdentificationDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationIdentificationDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationIdentificationDetail'):
        """No XML attributes are defined; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationIdentificationDetail', fromsubclass_=False, pretty_print=True):
        """Write all populated child elements to *outfile* in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Type is not None:
            namespaceprefix_ = self.Type_nsprefix_ + ':' if (UseCapturedNS_ and self.Type_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sType>%s</%sType>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Type), input_name='Type')), namespaceprefix_ , eol_))
        if self.Id is not None:
            namespaceprefix_ = self.Id_nsprefix_ + ':' if (UseCapturedNS_ and self.Id_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sId>%s</%sId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Id), input_name='Id')), namespaceprefix_ , eol_))
        if self.Number is not None:
            namespaceprefix_ = self.Number_nsprefix_ + ':' if (UseCapturedNS_ and self.Number_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sNumber>%s</%sNumber>%s' % (namespaceprefix_ , self.gds_format_integer(self.Number, input_name='Number'), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element *child_* (local name *nodeName_*) into self."""
        if nodeName_ == 'Type':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Type')
            value_ = self.gds_validate_string(value_, node, 'Type')
            self.Type = value_
            self.Type_nsprefix_ = child_.prefix
            # validate type FedExLocationType
            self.validate_FedExLocationType(self.Type)
        elif nodeName_ == 'Id':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Id')
            value_ = self.gds_validate_string(value_, node, 'Id')
            self.Id = value_
            self.Id_nsprefix_ = child_.prefix
        elif nodeName_ == 'Number' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'Number')
            ival_ = self.gds_validate_integer(ival_, node, 'Number')
            self.Number = ival_
            self.Number_nsprefix_ = child_.prefix
# end class LocationIdentificationDetail
class LocationPackageLimitsDetail(GeneratedsSuper):
__hash__ = GeneratedsSuper.__hash__
subclass = None
superclass = None
def __init__(self, Weight=None, Dimensions=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ns_prefix_ = None
self.Weight = Weight
self.Weight_nsprefix_ = None
self.Dimensions = Dimensions
self.Dimensions_nsprefix_ = None
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, LocationPackageLimitsDetail)
if subclass is not None:
return subclass(*args_, **kwargs_)
if LocationPackageLimitsDetail.subclass:
return LocationPackageLimitsDetail.subclass(*args_, **kwargs_)
else:
return LocationPackageLimitsDetail(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ns_prefix_(self):
return self.ns_prefix_
def set_ns_prefix_(self, ns_prefix):
self.ns_prefix_ = ns_prefix
def get_Weight(self):
return self.Weight
def set_Weight(self, Weight):
self.Weight = Weight
def get_Dimensions(self):
return self.Dimensions
def set_Dimensions(self, Dimensions):
self.Dimensions = Dimensions
def hasContent_(self):
if (
self.Weight is not None or
self.Dimensions is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationPackageLimitsDetail', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationPackageLimitsDetail')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None and name_ == 'LocationPackageLimitsDetail':
name_ = self.original_tagname_
if UseCapturedNS_ and self.ns_prefix_:
namespaceprefix_ = self.ns_prefix_ + ':'
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationPackageLimitsDetail')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationPackageLimitsDetail', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationPackageLimitsDetail'):
        # This element defines no XML attributes; generated as a no-op.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationPackageLimitsDetail', fromsubclass_=False, pretty_print=True):
        """Export the Weight and Dimensions child elements, when present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Weight is not None:
            # Use the child's captured prefix, if namespace capture is enabled.
            namespaceprefix_ = self.Weight_nsprefix_ + ':' if (UseCapturedNS_ and self.Weight_nsprefix_) else ''
            self.Weight.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Weight', pretty_print=pretty_print)
        if self.Dimensions is not None:
            namespaceprefix_ = self.Dimensions_nsprefix_ + ':' if (UseCapturedNS_ and self.Dimensions_nsprefix_) else ''
            self.Dimensions.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Dimensions', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace URI from the tag to get the local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes to parse for this element; generated as a no-op.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch a child node to the matching member object builder."""
        if nodeName_ == 'Weight':
            obj_ = Weight.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Weight = obj_
            obj_.original_tagname_ = 'Weight'
        elif nodeName_ == 'Dimensions':
            obj_ = Dimensions.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Dimensions = obj_
            obj_.original_tagname_ = 'Dimensions'
# end class LocationPackageLimitsDetail
class LocationSortDetail(GeneratedsSuper):
    """Specifies the criterion and order to be used to sort the location
    details."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Criterion=None, Order=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Simple-type members are validated immediately on construction.
        self.Criterion = Criterion
        self.validate_LocationSortCriteriaType(self.Criterion)
        self.Criterion_nsprefix_ = None
        self.Order = Order
        self.validate_LocationSortOrderType(self.Order)
        self.Order_nsprefix_ = None
    # Factory honoring subclass overrides registered via CurrentSubclassModule_.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LocationSortDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LocationSortDetail.subclass:
            return LocationSortDetail.subclass(*args_, **kwargs_)
        else:
            return LocationSortDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for member elements and namespace prefix.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Criterion(self):
        return self.Criterion
    def set_Criterion(self, Criterion):
        self.Criterion = Criterion
    def get_Order(self):
        return self.Order
    def set_Order(self, Order):
        self.Order = Order
    def validate_LocationSortCriteriaType(self, value):
        """Check *value* against the LocationSortCriteriaType enumeration.

        Runs only when a message collector is attached and global validation
        is enabled; failures are reported via the collector and reflected in
        the returned boolean.
        """
        result = True
        # Validate type LocationSortCriteriaType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['DISTANCE', 'LATEST_EXPRESS_DROPOFF_TIME', 'LATEST_GROUND_DROPOFF_TIME', 'LOCATION_TYPE']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationSortCriteriaType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_LocationSortOrderType(self, value):
        """Check *value* against the LocationSortOrderType enumeration."""
        result = True
        # Validate type LocationSortOrderType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['HIGHEST_TO_LOWEST', 'LOWEST_TO_HIGHEST']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationSortOrderType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        # True when any child element is set (controls self-closing tag).
        if (
            self.Criterion is not None or
            self.Order is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationSortDetail', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationSortDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured during parsing, if any.
        if self.original_tagname_ is not None and name_ == 'LocationSortDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationSortDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationSortDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationSortDetail'):
        # No XML attributes defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationSortDetail', fromsubclass_=False, pretty_print=True):
        """Write the Criterion and Order string children, when present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Criterion is not None:
            namespaceprefix_ = self.Criterion_nsprefix_ + ':' if (UseCapturedNS_ and self.Criterion_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCriterion>%s</%sCriterion>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Criterion), input_name='Criterion')), namespaceprefix_ , eol_))
        if self.Order is not None:
            namespaceprefix_ = self.Order_nsprefix_ + ':' if (UseCapturedNS_ and self.Order_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sOrder>%s</%sOrder>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Order), input_name='Order')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace URI from the tag to get the local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse a child text node into the matching member, with validation."""
        if nodeName_ == 'Criterion':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Criterion')
            value_ = self.gds_validate_string(value_, node, 'Criterion')
            self.Criterion = value_
            self.Criterion_nsprefix_ = child_.prefix
            # validate type LocationSortCriteriaType
            self.validate_LocationSortCriteriaType(self.Criterion)
        elif nodeName_ == 'Order':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Order')
            value_ = self.gds_validate_string(value_, node, 'Order')
            self.Order = value_
            self.Order_nsprefix_ = child_.prefix
            # validate type LocationSortOrderType
            self.validate_LocationSortOrderType(self.Order)
# end class LocationSortDetail
class LocationSupportedPackageDetail(GeneratedsSuper):
    """Generated binding: container for optional Weight and Dimensions
    child elements."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Weight=None, Dimensions=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Weight = Weight
        self.Weight_nsprefix_ = None
        self.Dimensions = Dimensions
        self.Dimensions_nsprefix_ = None
    # Factory honoring subclass overrides registered via CurrentSubclassModule_.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LocationSupportedPackageDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LocationSupportedPackageDetail.subclass:
            return LocationSupportedPackageDetail.subclass(*args_, **kwargs_)
        else:
            return LocationSupportedPackageDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for member elements and namespace prefix.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Weight(self):
        return self.Weight
    def set_Weight(self, Weight):
        self.Weight = Weight
    def get_Dimensions(self):
        return self.Dimensions
    def set_Dimensions(self, Dimensions):
        self.Dimensions = Dimensions
    def hasContent_(self):
        # True when any child element is set (controls self-closing tag).
        if (
            self.Weight is not None or
            self.Dimensions is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationSupportedPackageDetail', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationSupportedPackageDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured during parsing, if any.
        if self.original_tagname_ is not None and name_ == 'LocationSupportedPackageDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationSupportedPackageDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationSupportedPackageDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationSupportedPackageDetail'):
        # No XML attributes defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationSupportedPackageDetail', fromsubclass_=False, pretty_print=True):
        """Export the Weight and Dimensions child elements, when present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Weight is not None:
            namespaceprefix_ = self.Weight_nsprefix_ + ':' if (UseCapturedNS_ and self.Weight_nsprefix_) else ''
            self.Weight.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Weight', pretty_print=pretty_print)
        if self.Dimensions is not None:
            namespaceprefix_ = self.Dimensions_nsprefix_ + ':' if (UseCapturedNS_ and self.Dimensions_nsprefix_) else ''
            self.Dimensions.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Dimensions', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace URI from the tag to get the local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch a child node to the matching member object builder."""
        if nodeName_ == 'Weight':
            obj_ = Weight.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Weight = obj_
            obj_.original_tagname_ = 'Weight'
        elif nodeName_ == 'Dimensions':
            obj_ = Dimensions.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Dimensions = obj_
            obj_.original_tagname_ = 'Dimensions'
# end class LocationSupportedPackageDetail
class LocationSupportedShipmentDetail(GeneratedsSuper):
    """Generated binding: holds a repeatable list of PackageDetails
    (LocationSupportedPackageDetail) children."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, PackageDetails=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Repeatable element: default to a fresh list per instance.
        if PackageDetails is None:
            self.PackageDetails = []
        else:
            self.PackageDetails = PackageDetails
        self.PackageDetails_nsprefix_ = None
    # Factory honoring subclass overrides registered via CurrentSubclassModule_.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LocationSupportedShipmentDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LocationSupportedShipmentDetail.subclass:
            return LocationSupportedShipmentDetail.subclass(*args_, **kwargs_)
        else:
            return LocationSupportedShipmentDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors and list mutators for the PackageDetails member.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_PackageDetails(self):
        return self.PackageDetails
    def set_PackageDetails(self, PackageDetails):
        self.PackageDetails = PackageDetails
    def add_PackageDetails(self, value):
        self.PackageDetails.append(value)
    def insert_PackageDetails_at(self, index, value):
        self.PackageDetails.insert(index, value)
    def replace_PackageDetails_at(self, index, value):
        self.PackageDetails[index] = value
    def hasContent_(self):
        # True when the PackageDetails list is non-empty.
        if (
            self.PackageDetails
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationSupportedShipmentDetail', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationSupportedShipmentDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured during parsing, if any.
        if self.original_tagname_ is not None and name_ == 'LocationSupportedShipmentDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationSupportedShipmentDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LocationSupportedShipmentDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationSupportedShipmentDetail'):
        # No XML attributes defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LocationSupportedShipmentDetail', fromsubclass_=False, pretty_print=True):
        """Export each PackageDetails child element in order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for PackageDetails_ in self.PackageDetails:
            namespaceprefix_ = self.PackageDetails_nsprefix_ + ':' if (UseCapturedNS_ and self.PackageDetails_nsprefix_) else ''
            PackageDetails_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PackageDetails', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace URI from the tag to get the local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Append each parsed PackageDetails child to the member list."""
        if nodeName_ == 'PackageDetails':
            obj_ = LocationSupportedPackageDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.PackageDetails.append(obj_)
            obj_.original_tagname_ = 'PackageDetails'
# end class LocationSupportedShipmentDetail
class Notification(GeneratedsSuper):
    """The descriptive data regarding the result of the submitted
    transaction."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Severity=None, Source=None, Code=None, Message=None, LocalizedMessage=None, MessageParameters=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Severity is an enumerated simple type, validated on construction.
        self.Severity = Severity
        self.validate_NotificationSeverityType(self.Severity)
        self.Severity_nsprefix_ = None
        self.Source = Source
        self.Source_nsprefix_ = None
        self.Code = Code
        self.Code_nsprefix_ = None
        self.Message = Message
        self.Message_nsprefix_ = None
        self.LocalizedMessage = LocalizedMessage
        self.LocalizedMessage_nsprefix_ = None
        # Repeatable element: default to a fresh list per instance.
        if MessageParameters is None:
            self.MessageParameters = []
        else:
            self.MessageParameters = MessageParameters
        self.MessageParameters_nsprefix_ = None
    # Factory honoring subclass overrides registered via CurrentSubclassModule_.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Notification)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Notification.subclass:
            return Notification.subclass(*args_, **kwargs_)
        else:
            return Notification(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for member elements and namespace prefix.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Severity(self):
        return self.Severity
    def set_Severity(self, Severity):
        self.Severity = Severity
    def get_Source(self):
        return self.Source
    def set_Source(self, Source):
        self.Source = Source
    def get_Code(self):
        return self.Code
    def set_Code(self, Code):
        self.Code = Code
    def get_Message(self):
        return self.Message
    def set_Message(self, Message):
        self.Message = Message
    def get_LocalizedMessage(self):
        return self.LocalizedMessage
    def set_LocalizedMessage(self, LocalizedMessage):
        self.LocalizedMessage = LocalizedMessage
    def get_MessageParameters(self):
        return self.MessageParameters
    def set_MessageParameters(self, MessageParameters):
        self.MessageParameters = MessageParameters
    def add_MessageParameters(self, value):
        self.MessageParameters.append(value)
    def insert_MessageParameters_at(self, index, value):
        self.MessageParameters.insert(index, value)
    def replace_MessageParameters_at(self, index, value):
        self.MessageParameters[index] = value
    def validate_NotificationSeverityType(self, value):
        """Check *value* against the NotificationSeverityType enumeration.

        Runs only when a message collector is attached and global validation
        is enabled; failures are reported via the collector.
        """
        result = True
        # Validate type NotificationSeverityType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['ERROR', 'FAILURE', 'NOTE', 'SUCCESS', 'WARNING']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on NotificationSeverityType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        # True when any child element is set (controls self-closing tag).
        if (
            self.Severity is not None or
            self.Source is not None or
            self.Code is not None or
            self.Message is not None or
            self.LocalizedMessage is not None or
            self.MessageParameters
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Notification', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Notification')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured during parsing, if any.
        if self.original_tagname_ is not None and name_ == 'Notification':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Notification')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Notification', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Notification'):
        # No XML attributes defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Notification', fromsubclass_=False, pretty_print=True):
        """Write each populated string child, then the MessageParameters list."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Severity is not None:
            namespaceprefix_ = self.Severity_nsprefix_ + ':' if (UseCapturedNS_ and self.Severity_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSeverity>%s</%sSeverity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Severity), input_name='Severity')), namespaceprefix_ , eol_))
        if self.Source is not None:
            namespaceprefix_ = self.Source_nsprefix_ + ':' if (UseCapturedNS_ and self.Source_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSource>%s</%sSource>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Source), input_name='Source')), namespaceprefix_ , eol_))
        if self.Code is not None:
            namespaceprefix_ = self.Code_nsprefix_ + ':' if (UseCapturedNS_ and self.Code_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCode>%s</%sCode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Code), input_name='Code')), namespaceprefix_ , eol_))
        if self.Message is not None:
            namespaceprefix_ = self.Message_nsprefix_ + ':' if (UseCapturedNS_ and self.Message_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMessage>%s</%sMessage>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Message), input_name='Message')), namespaceprefix_ , eol_))
        if self.LocalizedMessage is not None:
            namespaceprefix_ = self.LocalizedMessage_nsprefix_ + ':' if (UseCapturedNS_ and self.LocalizedMessage_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocalizedMessage>%s</%sLocalizedMessage>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LocalizedMessage), input_name='LocalizedMessage')), namespaceprefix_ , eol_))
        for MessageParameters_ in self.MessageParameters:
            namespaceprefix_ = self.MessageParameters_nsprefix_ + ':' if (UseCapturedNS_ and self.MessageParameters_nsprefix_) else ''
            MessageParameters_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MessageParameters', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace URI from the tag to get the local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse each child into the matching member; Severity is validated."""
        if nodeName_ == 'Severity':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Severity')
            value_ = self.gds_validate_string(value_, node, 'Severity')
            self.Severity = value_
            self.Severity_nsprefix_ = child_.prefix
            # validate type NotificationSeverityType
            self.validate_NotificationSeverityType(self.Severity)
        elif nodeName_ == 'Source':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Source')
            value_ = self.gds_validate_string(value_, node, 'Source')
            self.Source = value_
            self.Source_nsprefix_ = child_.prefix
        elif nodeName_ == 'Code':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Code')
            value_ = self.gds_validate_string(value_, node, 'Code')
            self.Code = value_
            self.Code_nsprefix_ = child_.prefix
        elif nodeName_ == 'Message':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Message')
            value_ = self.gds_validate_string(value_, node, 'Message')
            self.Message = value_
            self.Message_nsprefix_ = child_.prefix
        elif nodeName_ == 'LocalizedMessage':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocalizedMessage')
            value_ = self.gds_validate_string(value_, node, 'LocalizedMessage')
            self.LocalizedMessage = value_
            self.LocalizedMessage_nsprefix_ = child_.prefix
        elif nodeName_ == 'MessageParameters':
            obj_ = NotificationParameter.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.MessageParameters.append(obj_)
            obj_.original_tagname_ = 'MessageParameters'
# end class Notification
class NotificationParameter(GeneratedsSuper):
    """Generated binding: an Id/Value string pair used as a substitution
    parameter inside a Notification message."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Id=None, Value=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Id = Id
        self.Id_nsprefix_ = None
        self.Value = Value
        self.Value_nsprefix_ = None
    # Factory honoring subclass overrides registered via CurrentSubclassModule_.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, NotificationParameter)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if NotificationParameter.subclass:
            return NotificationParameter.subclass(*args_, **kwargs_)
        else:
            return NotificationParameter(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for member elements and namespace prefix.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Id(self):
        return self.Id
    def set_Id(self, Id):
        self.Id = Id
    def get_Value(self):
        return self.Value
    def set_Value(self, Value):
        self.Value = Value
    def hasContent_(self):
        # True when any child element is set (controls self-closing tag).
        if (
            self.Id is not None or
            self.Value is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NotificationParameter', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('NotificationParameter')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured during parsing, if any.
        if self.original_tagname_ is not None and name_ == 'NotificationParameter':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NotificationParameter')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NotificationParameter', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NotificationParameter'):
        # No XML attributes defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NotificationParameter', fromsubclass_=False, pretty_print=True):
        """Write the Id and Value string children, when present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Id is not None:
            namespaceprefix_ = self.Id_nsprefix_ + ':' if (UseCapturedNS_ and self.Id_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sId>%s</%sId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Id), input_name='Id')), namespaceprefix_ , eol_))
        if self.Value is not None:
            namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sValue>%s</%sValue>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Value), input_name='Value')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace URI from the tag to get the local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse the Id and Value text children into string members."""
        if nodeName_ == 'Id':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Id')
            value_ = self.gds_validate_string(value_, node, 'Id')
            self.Id = value_
            self.Id_nsprefix_ = child_.prefix
        elif nodeName_ == 'Value':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Value')
            value_ = self.gds_validate_string(value_, node, 'Value')
            self.Value = value_
            self.Value_nsprefix_ = child_.prefix
# end class NotificationParameter
class ReservationAvailabilityDetail(GeneratedsSuper):
__hash__ = GeneratedsSuper.__hash__
subclass = None
superclass = None
    def __init__(self, Attributes=None, gds_collector_=None, **kwargs_):
        """Initialize with an optional list of Attributes enumeration strings."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Repeatable element: default to a fresh list per instance.
        if Attributes is None:
            self.Attributes = []
        else:
            self.Attributes = Attributes
        self.Attributes_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate ReservationAvailabilityDetail or a registered override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ReservationAvailabilityDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ReservationAvailabilityDetail.subclass:
            return ReservationAvailabilityDetail.subclass(*args_, **kwargs_)
        else:
            return ReservationAvailabilityDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        """Return the captured XML namespace prefix for this element."""
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        """Set the XML namespace prefix used when exporting this element."""
        self.ns_prefix_ = ns_prefix
    def get_Attributes(self):
        """Return the list of Attributes enumeration strings."""
        return self.Attributes
    def set_Attributes(self, Attributes):
        """Replace the Attributes list wholesale."""
        self.Attributes = Attributes
    def add_Attributes(self, value):
        """Append one value to the Attributes list."""
        self.Attributes.append(value)
    def insert_Attributes_at(self, index, value):
        """Insert *value* into the Attributes list at *index*."""
        self.Attributes.insert(index, value)
    def replace_Attributes_at(self, index, value):
        """Overwrite the Attributes entry at *index* with *value*."""
        self.Attributes[index] = value
    def validate_ReservationAttributesType(self, value):
        """Check *value* against the ReservationAttributesType enumeration.

        Validation runs only when a message collector is attached and
        Validate_simpletypes_ is enabled; returns False when a check fails,
        True otherwise (including when validation is skipped).
        """
        result = True
        # Validate type ReservationAttributesType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['RESERVATION_AVAILABLE']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReservationAttributesType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
def hasContent_(self):
if (
self.Attributes
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReservationAvailabilityDetail', pretty_print=True):
        """Write this element (open tag, attributes, children, close tag) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReservationAvailabilityDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A tag name captured at parse time overrides the default element name.
        if self.original_tagname_ is not None and name_ == 'ReservationAvailabilityDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReservationAvailabilityDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReservationAvailabilityDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReservationAvailabilityDetail'):
        # This element defines no XML attributes, so there is nothing to write.
        pass
def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReservationAvailabilityDetail', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Attributes_ in self.Attributes:
namespaceprefix_ = self.Attributes_nsprefix_ + ':' if (UseCapturedNS_ and self.Attributes_nsprefix_) else ''
showIndent(outfile, level, pretty_print)
outfile.write('<%sAttributes>%s</%sAttributes>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Attributes_), input_name='Attributes')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree node; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace portion from the child tag before dispatch.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element; 'Attributes' text values are appended to self.Attributes."""
        if nodeName_ == 'Attributes':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Attributes')
            value_ = self.gds_validate_string(value_, node, 'Attributes')
            self.Attributes.append(value_)
            self.Attributes_nsprefix_ = child_.prefix
            # validate type ReservationAttributesType
            self.validate_ReservationAttributesType(self.Attributes[-1])
# end class ReservationAvailabilityDetail
class SearchLocationConstraints(GeneratedsSuper):
    """Specifies additional constraints on the attributes of the locations
    being searched.

    Generated XML binding class: list-valued members hold enumeration
    strings or child binding objects; *_nsprefix_ members track captured
    namespace prefixes for round-trip export.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, RadiusDistance=None, DropOffTimeNeeded=None, ResultsFilters=None, SupportedRedirectToHoldServices=None, RequiredLocationCapabilities=None, ShipmentDetail=None, ResultsToSkip=None, ResultsRequested=None, LocationContentOptions=None, LocationTypesToInclude=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.RadiusDistance = RadiusDistance
        self.RadiusDistance_nsprefix_ = None
        # Accept either a datetime.time object or an 'HH:MM:SS' string.
        if isinstance(DropOffTimeNeeded, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(DropOffTimeNeeded, '%H:%M:%S').time()
        else:
            initvalue_ = DropOffTimeNeeded
        self.DropOffTimeNeeded = initvalue_
        self.DropOffTimeNeeded_nsprefix_ = None
        # List-valued members default to fresh lists (no shared mutable default).
        if ResultsFilters is None:
            self.ResultsFilters = []
        else:
            self.ResultsFilters = ResultsFilters
        self.ResultsFilters_nsprefix_ = None
        if SupportedRedirectToHoldServices is None:
            self.SupportedRedirectToHoldServices = []
        else:
            self.SupportedRedirectToHoldServices = SupportedRedirectToHoldServices
        self.SupportedRedirectToHoldServices_nsprefix_ = None
        if RequiredLocationAttributes is None:
            self.RequiredLocationAttributes = []
        else:
            self.RequiredLocationAttributes = RequiredLocationAttributes
        self.RequiredLocationAttributes_nsprefix_ = None
        if RequiredLocationCapabilities is None:
            self.RequiredLocationCapabilities = []
        else:
            self.RequiredLocationCapabilities = RequiredLocationCapabilities
        self.RequiredLocationCapabilities_nsprefix_ = None
        self.ShipmentDetail = ShipmentDetail
        self.ShipmentDetail_nsprefix_ = None
        self.ResultsToSkip = ResultsToSkip
        self.ResultsToSkip_nsprefix_ = None
        self.ResultsRequested = ResultsRequested
        self.ResultsRequested_nsprefix_ = None
        if LocationContentOptions is None:
            self.LocationContentOptions = []
        else:
            self.LocationContentOptions = LocationContentOptions
        self.LocationContentOptions_nsprefix_ = None
        if LocationTypesToInclude is None:
            self.LocationTypesToInclude = []
        else:
            self.LocationTypesToInclude = LocationTypesToInclude
        self.LocationTypesToInclude_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate the class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SearchLocationConstraints)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SearchLocationConstraints.subclass:
            return SearchLocationConstraints.subclass(*args_, **kwargs_)
        else:
            return SearchLocationConstraints(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_RadiusDistance(self):
        return self.RadiusDistance
    def set_RadiusDistance(self, RadiusDistance):
        self.RadiusDistance = RadiusDistance
    def get_DropOffTimeNeeded(self):
        return self.DropOffTimeNeeded
    def set_DropOffTimeNeeded(self, DropOffTimeNeeded):
        self.DropOffTimeNeeded = DropOffTimeNeeded
    def get_ResultsFilters(self):
        return self.ResultsFilters
    def set_ResultsFilters(self, ResultsFilters):
        self.ResultsFilters = ResultsFilters
    def add_ResultsFilters(self, value):
        self.ResultsFilters.append(value)
    def insert_ResultsFilters_at(self, index, value):
        self.ResultsFilters.insert(index, value)
    def replace_ResultsFilters_at(self, index, value):
        self.ResultsFilters[index] = value
    def get_SupportedRedirectToHoldServices(self):
        return self.SupportedRedirectToHoldServices
    def set_SupportedRedirectToHoldServices(self, SupportedRedirectToHoldServices):
        self.SupportedRedirectToHoldServices = SupportedRedirectToHoldServices
    def add_SupportedRedirectToHoldServices(self, value):
        self.SupportedRedirectToHoldServices.append(value)
    def insert_SupportedRedirectToHoldServices_at(self, index, value):
        self.SupportedRedirectToHoldServices.insert(index, value)
    def replace_SupportedRedirectToHoldServices_at(self, index, value):
        self.SupportedRedirectToHoldServices[index] = value
    def get_RequiredLocationAttributes(self):
        return self.RequiredLocationAttributes
    def set_RequiredLocationAttributes(self, RequiredLocationAttributes):
        self.RequiredLocationAttributes = RequiredLocationAttributes
    def add_RequiredLocationAttributes(self, value):
        self.RequiredLocationAttributes.append(value)
    def insert_RequiredLocationAttributes_at(self, index, value):
        self.RequiredLocationAttributes.insert(index, value)
    def replace_RequiredLocationAttributes_at(self, index, value):
        self.RequiredLocationAttributes[index] = value
    def get_RequiredLocationCapabilities(self):
        return self.RequiredLocationCapabilities
    def set_RequiredLocationCapabilities(self, RequiredLocationCapabilities):
        self.RequiredLocationCapabilities = RequiredLocationCapabilities
    def add_RequiredLocationCapabilities(self, value):
        self.RequiredLocationCapabilities.append(value)
    def insert_RequiredLocationCapabilities_at(self, index, value):
        self.RequiredLocationCapabilities.insert(index, value)
    def replace_RequiredLocationCapabilities_at(self, index, value):
        self.RequiredLocationCapabilities[index] = value
    def get_ShipmentDetail(self):
        return self.ShipmentDetail
    def set_ShipmentDetail(self, ShipmentDetail):
        self.ShipmentDetail = ShipmentDetail
    def get_ResultsToSkip(self):
        return self.ResultsToSkip
    def set_ResultsToSkip(self, ResultsToSkip):
        self.ResultsToSkip = ResultsToSkip
    def get_ResultsRequested(self):
        return self.ResultsRequested
    def set_ResultsRequested(self, ResultsRequested):
        self.ResultsRequested = ResultsRequested
    def get_LocationContentOptions(self):
        return self.LocationContentOptions
    def set_LocationContentOptions(self, LocationContentOptions):
        self.LocationContentOptions = LocationContentOptions
    def add_LocationContentOptions(self, value):
        self.LocationContentOptions.append(value)
    def insert_LocationContentOptions_at(self, index, value):
        self.LocationContentOptions.insert(index, value)
    def replace_LocationContentOptions_at(self, index, value):
        self.LocationContentOptions[index] = value
    def get_LocationTypesToInclude(self):
        return self.LocationTypesToInclude
    def set_LocationTypesToInclude(self, LocationTypesToInclude):
        self.LocationTypesToInclude = LocationTypesToInclude
    def add_LocationTypesToInclude(self, value):
        self.LocationTypesToInclude.append(value)
    def insert_LocationTypesToInclude_at(self, index, value):
        self.LocationTypesToInclude.insert(index, value)
    def replace_LocationTypesToInclude_at(self, index, value):
        self.LocationTypesToInclude[index] = value
    def validate_LocationSearchFilterType(self, value):
        """Validate value against the LocationSearchFilterType xs:string enumeration."""
        result = True
        # Validate type LocationSearchFilterType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['EXCLUDE_LOCATIONS_OUTSIDE_COUNTRY', 'EXCLUDE_LOCATIONS_OUTSIDE_STATE_OR_PROVINCE', 'EXCLUDE_UNAVAILABLE_LOCATIONS']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationSearchFilterType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_SupportedRedirectToHoldServiceType(self, value):
        """Validate value against the SupportedRedirectToHoldServiceType xs:string enumeration."""
        result = True
        # Validate type SupportedRedirectToHoldServiceType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['FEDEX_EXPRESS', 'FEDEX_GROUND', 'FEDEX_GROUND_HOME_DELIVERY']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on SupportedRedirectToHoldServiceType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_LocationAttributesType(self, value):
        """Validate value against the LocationAttributesType xs:string enumeration."""
        result = True
        # Validate type LocationAttributesType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['ACCEPTS_CASH', 'ALREADY_OPEN', 'CLEARANCE_SERVICES', 'COPY_AND_PRINT_SERVICES', 'DANGEROUS_GOODS_SERVICES', 'DIRECT_MAIL_SERVICES', 'DOMESTIC_SHIPPING_SERVICES', 'DROP_BOX', 'INTERNATIONAL_SHIPPING_SERVICES', 'LOCATION_IS_IN_AIRPORT', 'NOTARY_SERVICES', 'OBSERVES_DAY_LIGHT_SAVING_TIMES', 'OPEN_TWENTY_FOUR_HOURS', 'PACKAGING_SUPPLIES', 'PACK_AND_SHIP', 'PASSPORT_PHOTO_SERVICES', 'RETURNS_SERVICES', 'SIGNS_AND_BANNERS_SERVICE', 'SONY_PICTURE_STATION']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationAttributesType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_LocationContentOptionType(self, value):
        """Validate value against the LocationContentOptionType xs:string enumeration."""
        result = True
        # Validate type LocationContentOptionType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['HOLIDAYS', 'LOCATION_DROPOFF_TIMES', 'MAP_URL', 'TIMEZONE_OFFSET']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationContentOptionType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_FedExLocationType(self, value):
        """Validate value against the FedExLocationType xs:string enumeration."""
        result = True
        # Validate type FedExLocationType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['FEDEX_AUTHORIZED_SHIP_CENTER', 'FEDEX_EXPRESS_STATION', 'FEDEX_FACILITY', 'FEDEX_FREIGHT_SERVICE_CENTER', 'FEDEX_GROUND_TERMINAL', 'FEDEX_HOME_DELIVERY_STATION', 'FEDEX_OFFICE', 'FEDEX_ONSITE', 'FEDEX_SELF_SERVICE_LOCATION', 'FEDEX_SHIPSITE', 'FEDEX_SHIP_AND_GET', 'FEDEX_SMART_POST_HUB']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on FedExLocationType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Report whether any member holds content that must be serialized."""
        if (
            self.RadiusDistance is not None or
            self.DropOffTimeNeeded is not None or
            self.ResultsFilters or
            self.SupportedRedirectToHoldServices or
            self.RequiredLocationAttributes or
            self.RequiredLocationCapabilities or
            self.ShipmentDetail is not None or
            self.ResultsToSkip is not None or
            self.ResultsRequested is not None or
            self.LocationContentOptions or
            self.LocationTypesToInclude
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SearchLocationConstraints', pretty_print=True):
        """Write this element (open tag, attributes, children, close tag) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SearchLocationConstraints')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A tag name captured at parse time overrides the default element name.
        if self.original_tagname_ is not None and name_ == 'SearchLocationConstraints':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SearchLocationConstraints')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SearchLocationConstraints', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SearchLocationConstraints'):
        # This element defines no XML attributes, so there is nothing to write.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SearchLocationConstraints', fromsubclass_=False, pretty_print=True):
        """Serialize each populated member as a child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.RadiusDistance is not None:
            namespaceprefix_ = self.RadiusDistance_nsprefix_ + ':' if (UseCapturedNS_ and self.RadiusDistance_nsprefix_) else ''
            self.RadiusDistance.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RadiusDistance', pretty_print=pretty_print)
        if self.DropOffTimeNeeded is not None:
            namespaceprefix_ = self.DropOffTimeNeeded_nsprefix_ + ':' if (UseCapturedNS_ and self.DropOffTimeNeeded_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDropOffTimeNeeded>%s</%sDropOffTimeNeeded>%s' % (namespaceprefix_ , self.gds_format_time(self.DropOffTimeNeeded, input_name='DropOffTimeNeeded'), namespaceprefix_ , eol_))
        for ResultsFilters_ in self.ResultsFilters:
            namespaceprefix_ = self.ResultsFilters_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultsFilters_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sResultsFilters>%s</%sResultsFilters>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ResultsFilters_), input_name='ResultsFilters')), namespaceprefix_ , eol_))
        for SupportedRedirectToHoldServices_ in self.SupportedRedirectToHoldServices:
            namespaceprefix_ = self.SupportedRedirectToHoldServices_nsprefix_ + ':' if (UseCapturedNS_ and self.SupportedRedirectToHoldServices_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSupportedRedirectToHoldServices>%s</%sSupportedRedirectToHoldServices>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(SupportedRedirectToHoldServices_), input_name='SupportedRedirectToHoldServices')), namespaceprefix_ , eol_))
        for RequiredLocationAttributes_ in self.RequiredLocationAttributes:
            namespaceprefix_ = self.RequiredLocationAttributes_nsprefix_ + ':' if (UseCapturedNS_ and self.RequiredLocationAttributes_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sRequiredLocationAttributes>%s</%sRequiredLocationAttributes>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(RequiredLocationAttributes_), input_name='RequiredLocationAttributes')), namespaceprefix_ , eol_))
        for RequiredLocationCapabilities_ in self.RequiredLocationCapabilities:
            namespaceprefix_ = self.RequiredLocationCapabilities_nsprefix_ + ':' if (UseCapturedNS_ and self.RequiredLocationCapabilities_nsprefix_) else ''
            RequiredLocationCapabilities_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RequiredLocationCapabilities', pretty_print=pretty_print)
        if self.ShipmentDetail is not None:
            namespaceprefix_ = self.ShipmentDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipmentDetail_nsprefix_) else ''
            self.ShipmentDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ShipmentDetail', pretty_print=pretty_print)
        if self.ResultsToSkip is not None:
            namespaceprefix_ = self.ResultsToSkip_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultsToSkip_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sResultsToSkip>%s</%sResultsToSkip>%s' % (namespaceprefix_ , self.gds_format_integer(self.ResultsToSkip, input_name='ResultsToSkip'), namespaceprefix_ , eol_))
        if self.ResultsRequested is not None:
            namespaceprefix_ = self.ResultsRequested_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultsRequested_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sResultsRequested>%s</%sResultsRequested>%s' % (namespaceprefix_ , self.gds_format_integer(self.ResultsRequested, input_name='ResultsRequested'), namespaceprefix_ , eol_))
        for LocationContentOptions_ in self.LocationContentOptions:
            namespaceprefix_ = self.LocationContentOptions_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationContentOptions_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocationContentOptions>%s</%sLocationContentOptions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(LocationContentOptions_), input_name='LocationContentOptions')), namespaceprefix_ , eol_))
        for LocationTypesToInclude_ in self.LocationTypesToInclude:
            namespaceprefix_ = self.LocationTypesToInclude_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationTypesToInclude_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocationTypesToInclude>%s</%sLocationTypesToInclude>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(LocationTypesToInclude_), input_name='LocationTypesToInclude')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree node; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element and store it on the matching member."""
        if nodeName_ == 'RadiusDistance':
            obj_ = Distance.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.RadiusDistance = obj_
            obj_.original_tagname_ = 'RadiusDistance'
        elif nodeName_ == 'DropOffTimeNeeded':
            sval_ = child_.text
            dval_ = self.gds_parse_time(sval_)
            self.DropOffTimeNeeded = dval_
            self.DropOffTimeNeeded_nsprefix_ = child_.prefix
        elif nodeName_ == 'ResultsFilters':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ResultsFilters')
            value_ = self.gds_validate_string(value_, node, 'ResultsFilters')
            self.ResultsFilters.append(value_)
            self.ResultsFilters_nsprefix_ = child_.prefix
            # validate type LocationSearchFilterType
            self.validate_LocationSearchFilterType(self.ResultsFilters[-1])
        elif nodeName_ == 'SupportedRedirectToHoldServices':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'SupportedRedirectToHoldServices')
            value_ = self.gds_validate_string(value_, node, 'SupportedRedirectToHoldServices')
            self.SupportedRedirectToHoldServices.append(value_)
            self.SupportedRedirectToHoldServices_nsprefix_ = child_.prefix
            # validate type SupportedRedirectToHoldServiceType
            self.validate_SupportedRedirectToHoldServiceType(self.SupportedRedirectToHoldServices[-1])
        elif nodeName_ == 'RequiredLocationAttributes':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'RequiredLocationAttributes')
            value_ = self.gds_validate_string(value_, node, 'RequiredLocationAttributes')
            self.RequiredLocationAttributes.append(value_)
            self.RequiredLocationAttributes_nsprefix_ = child_.prefix
            # validate type LocationAttributesType
            self.validate_LocationAttributesType(self.RequiredLocationAttributes[-1])
        elif nodeName_ == 'RequiredLocationCapabilities':
            obj_ = LocationCapabilityDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.RequiredLocationCapabilities.append(obj_)
            obj_.original_tagname_ = 'RequiredLocationCapabilities'
        elif nodeName_ == 'ShipmentDetail':
            obj_ = LocationSupportedShipmentDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ShipmentDetail = obj_
            obj_.original_tagname_ = 'ShipmentDetail'
        elif nodeName_ == 'ResultsToSkip' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'ResultsToSkip')
            # Schema type is xs:nonNegativeInteger.
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'ResultsToSkip')
            self.ResultsToSkip = ival_
            self.ResultsToSkip_nsprefix_ = child_.prefix
        elif nodeName_ == 'ResultsRequested' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'ResultsRequested')
            # Schema type is xs:nonNegativeInteger.
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'ResultsRequested')
            self.ResultsRequested = ival_
            self.ResultsRequested_nsprefix_ = child_.prefix
        elif nodeName_ == 'LocationContentOptions':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocationContentOptions')
            value_ = self.gds_validate_string(value_, node, 'LocationContentOptions')
            self.LocationContentOptions.append(value_)
            self.LocationContentOptions_nsprefix_ = child_.prefix
            # validate type LocationContentOptionType
            self.validate_LocationContentOptionType(self.LocationContentOptions[-1])
        elif nodeName_ == 'LocationTypesToInclude':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocationTypesToInclude')
            value_ = self.gds_validate_string(value_, node, 'LocationTypesToInclude')
            self.LocationTypesToInclude.append(value_)
            self.LocationTypesToInclude_nsprefix_ = child_.prefix
            # validate type FedExLocationType
            self.validate_FedExLocationType(self.LocationTypesToInclude[-1])
# end class SearchLocationConstraints
class ValidateLocationAvailabilityRequest(GeneratedsSuper):
    """Generated XML binding for a request carrying a single
    RestrictionsAndPrivileges child element."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, RestrictionsAndPrivileges=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.RestrictionsAndPrivileges = RestrictionsAndPrivileges
        self.RestrictionsAndPrivileges_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate the class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ValidateLocationAvailabilityRequest)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ValidateLocationAvailabilityRequest.subclass:
            return ValidateLocationAvailabilityRequest.subclass(*args_, **kwargs_)
        else:
            return ValidateLocationAvailabilityRequest(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_RestrictionsAndPrivileges(self):
        return self.RestrictionsAndPrivileges
    def set_RestrictionsAndPrivileges(self, RestrictionsAndPrivileges):
        self.RestrictionsAndPrivileges = RestrictionsAndPrivileges
    def hasContent_(self):
        """Report whether any member holds content that must be serialized."""
        if (
            self.RestrictionsAndPrivileges is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ValidateLocationAvailabilityRequest', pretty_print=True):
        """Write this element (open tag, attributes, children, close tag) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ValidateLocationAvailabilityRequest')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A tag name captured at parse time overrides the default element name.
        if self.original_tagname_ is not None and name_ == 'ValidateLocationAvailabilityRequest':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ValidateLocationAvailabilityRequest')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ValidateLocationAvailabilityRequest', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ValidateLocationAvailabilityRequest'):
        # This element defines no XML attributes, so there is nothing to write.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ValidateLocationAvailabilityRequest', fromsubclass_=False, pretty_print=True):
        """Serialize the RestrictionsAndPrivileges child element when present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.RestrictionsAndPrivileges is not None:
            namespaceprefix_ = self.RestrictionsAndPrivileges_nsprefix_ + ':' if (UseCapturedNS_ and self.RestrictionsAndPrivileges_nsprefix_) else ''
            self.RestrictionsAndPrivileges.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RestrictionsAndPrivileges', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree node; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse the RestrictionsAndPrivileges child element."""
        if nodeName_ == 'RestrictionsAndPrivileges':
            obj_ = RestrictionsAndPrivilegesPolicyDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.RestrictionsAndPrivileges = obj_
            obj_.original_tagname_ = 'RestrictionsAndPrivileges'
# end class ValidateLocationAvailabilityRequest
class RestrictionsAndPrivilegesPolicyDetail(GeneratedsSuper):
    """Generated XML binding holding a list of PrivilegeDetails child
    elements (EnterprisePrivilegeDetail objects)."""
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, PrivilegeDetails=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # List member defaults to a fresh list (no shared mutable default).
        if PrivilegeDetails is None:
            self.PrivilegeDetails = []
        else:
            self.PrivilegeDetails = PrivilegeDetails
        self.PrivilegeDetails_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate the class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RestrictionsAndPrivilegesPolicyDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RestrictionsAndPrivilegesPolicyDetail.subclass:
            return RestrictionsAndPrivilegesPolicyDetail.subclass(*args_, **kwargs_)
        else:
            return RestrictionsAndPrivilegesPolicyDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_PrivilegeDetails(self):
        return self.PrivilegeDetails
    def set_PrivilegeDetails(self, PrivilegeDetails):
        self.PrivilegeDetails = PrivilegeDetails
    def add_PrivilegeDetails(self, value):
        self.PrivilegeDetails.append(value)
    def insert_PrivilegeDetails_at(self, index, value):
        self.PrivilegeDetails.insert(index, value)
    def replace_PrivilegeDetails_at(self, index, value):
        self.PrivilegeDetails[index] = value
    def hasContent_(self):
        """Report whether any member holds content that must be serialized."""
        if (
            self.PrivilegeDetails
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionsAndPrivilegesPolicyDetail', pretty_print=True):
        """Write this element (open tag, attributes, children, close tag) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RestrictionsAndPrivilegesPolicyDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # A tag name captured at parse time overrides the default element name.
        if self.original_tagname_ is not None and name_ == 'RestrictionsAndPrivilegesPolicyDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RestrictionsAndPrivilegesPolicyDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RestrictionsAndPrivilegesPolicyDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RestrictionsAndPrivilegesPolicyDetail'):
        # This element defines no XML attributes, so there is nothing to write.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RestrictionsAndPrivilegesPolicyDetail', fromsubclass_=False, pretty_print=True):
        """Serialize each PrivilegeDetails child element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for PrivilegeDetails_ in self.PrivilegeDetails:
            namespaceprefix_ = self.PrivilegeDetails_nsprefix_ + ':' if (UseCapturedNS_ and self.PrivilegeDetails_nsprefix_) else ''
            PrivilegeDetails_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PrivilegeDetails', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree node; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one PrivilegeDetails child and append it to the list."""
        if nodeName_ == 'PrivilegeDetails':
            obj_ = EnterprisePrivilegeDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.PrivilegeDetails.append(obj_)
            obj_.original_tagname_ = 'PrivilegeDetails'
# end class RestrictionsAndPrivilegesPolicyDetail
class DateRange(GeneratedsSuper):
    """Generated binding for the ``DateRange`` XML complex type.

    Holds an optional ``Begins`` and ``Ends`` date pair and knows how to
    export itself to XML and rebuild itself from an ElementTree node.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Begins=None, Ends=None, gds_collector_=None, **kwargs_):
        """Accept date objects or ISO ``YYYY-MM-DD`` strings for Begins/Ends.

        String values are parsed into ``datetime.date`` instances.
        """
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        if isinstance(Begins, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Begins, '%Y-%m-%d').date()
        else:
            initvalue_ = Begins
        self.Begins = initvalue_
        self.Begins_nsprefix_ = None
        if isinstance(Ends, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Ends, '%Y-%m-%d').date()
        else:
            initvalue_ = Ends
        self.Ends = initvalue_
        self.Ends_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create a DateRange, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DateRange)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DateRange.subclass:
            return DateRange.subclass(*args_, **kwargs_)
        else:
            return DateRange(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        """Return the XML namespace prefix captured for this element."""
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        """Set the XML namespace prefix used when exporting this element."""
        self.ns_prefix_ = ns_prefix
    def get_Begins(self):
        return self.Begins
    def set_Begins(self, Begins):
        self.Begins = Begins
    def get_Ends(self):
        return self.Ends
    def set_Ends(self, Ends):
        self.Ends = Ends
    def hasContent_(self):
        """Report whether either date is set (i.e. there is content to export)."""
        if (
            self.Begins is not None or
            self.Ends is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DateRange', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DateRange')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'DateRange':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DateRange')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DateRange', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DateRange'):
        """No XML attributes are defined for this type; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DateRange', fromsubclass_=False, pretty_print=True):
        """Write the Begins and Ends child elements (when set) as formatted dates."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Begins is not None:
            namespaceprefix_ = self.Begins_nsprefix_ + ':' if (UseCapturedNS_ and self.Begins_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBegins>%s</%sBegins>%s' % (namespaceprefix_ , self.gds_format_date(self.Begins, input_name='Begins'), namespaceprefix_ , eol_))
        if self.Ends is not None:
            namespaceprefix_ = self.Ends_nsprefix_ + ':' if (UseCapturedNS_ and self.Ends_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sEnds>%s</%sEnds>%s' % (namespaceprefix_ , self.gds_format_date(self.Ends, input_name='Ends'), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to read."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element (Begins or Ends) into a date value."""
        if nodeName_ == 'Begins':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.Begins = dval_
            self.Begins_nsprefix_ = child_.prefix
        elif nodeName_ == 'Ends':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.Ends = dval_
            self.Ends_nsprefix_ = child_.prefix
# end class DateRange
class EnterprisePrivilegeDetail(GeneratedsSuper):
    """Generated binding for the ``EnterprisePrivilegeDetail`` XML complex type.

    Carries an Id, a Permission (EnterprisePermissionType enum), a
    CarrierCode (CarrierCodeType enum), and an optional EffectiveDateRange.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Id=None, Permission=None, CarrierCode=None, EffectiveDateRange=None, gds_collector_=None, **kwargs_):
        """Store the fields; enum-typed values are validated immediately."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Id = Id
        self.Id_nsprefix_ = None
        self.Permission = Permission
        self.validate_EnterprisePermissionType(self.Permission)
        self.Permission_nsprefix_ = None
        self.CarrierCode = CarrierCode
        self.validate_CarrierCodeType(self.CarrierCode)
        self.CarrierCode_nsprefix_ = None
        self.EffectiveDateRange = EffectiveDateRange
        self.EffectiveDateRange_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an EnterprisePrivilegeDetail, honoring subclass overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, EnterprisePrivilegeDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if EnterprisePrivilegeDetail.subclass:
            return EnterprisePrivilegeDetail.subclass(*args_, **kwargs_)
        else:
            return EnterprisePrivilegeDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        """Return the XML namespace prefix captured for this element."""
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        """Set the XML namespace prefix used when exporting this element."""
        self.ns_prefix_ = ns_prefix
    def get_Id(self):
        return self.Id
    def set_Id(self, Id):
        self.Id = Id
    def get_Permission(self):
        return self.Permission
    def set_Permission(self, Permission):
        self.Permission = Permission
    def get_CarrierCode(self):
        return self.CarrierCode
    def set_CarrierCode(self, CarrierCode):
        self.CarrierCode = CarrierCode
    def get_EffectiveDateRange(self):
        return self.EffectiveDateRange
    def set_EffectiveDateRange(self, EffectiveDateRange):
        self.EffectiveDateRange = EffectiveDateRange
    def validate_EnterprisePermissionType(self, value):
        """Check *value* against the EnterprisePermissionType enumeration.

        Problems are reported via the attached gds_collector_; returns
        False on failure, True otherwise.
        """
        result = True
        # Validate type EnterprisePermissionType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['ALLOWED', 'ALLOWED_BY_EXCEPTION', 'DISALLOWED']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on EnterprisePermissionType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_CarrierCodeType(self, value):
        """Check *value* against the CarrierCodeType enumeration (see above)."""
        result = True
        # Validate type CarrierCodeType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['FDXC', 'FDXE', 'FDXG', 'FXCC', 'FXFR', 'FXSP']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on CarrierCodeType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Report whether any child element is populated."""
        if (
            self.Id is not None or
            self.Permission is not None or
            self.CarrierCode is not None or
            self.EffectiveDateRange is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EnterprisePrivilegeDetail', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('EnterprisePrivilegeDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'EnterprisePrivilegeDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EnterprisePrivilegeDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EnterprisePrivilegeDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EnterprisePrivilegeDetail'):
        """No XML attributes are defined for this type; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EnterprisePrivilegeDetail', fromsubclass_=False, pretty_print=True):
        """Write Id, Permission, CarrierCode, and EffectiveDateRange children in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Id is not None:
            namespaceprefix_ = self.Id_nsprefix_ + ':' if (UseCapturedNS_ and self.Id_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sId>%s</%sId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Id), input_name='Id')), namespaceprefix_ , eol_))
        if self.Permission is not None:
            namespaceprefix_ = self.Permission_nsprefix_ + ':' if (UseCapturedNS_ and self.Permission_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPermission>%s</%sPermission>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Permission), input_name='Permission')), namespaceprefix_ , eol_))
        if self.CarrierCode is not None:
            namespaceprefix_ = self.CarrierCode_nsprefix_ + ':' if (UseCapturedNS_ and self.CarrierCode_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCarrierCode>%s</%sCarrierCode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CarrierCode), input_name='CarrierCode')), namespaceprefix_ , eol_))
        if self.EffectiveDateRange is not None:
            namespaceprefix_ = self.EffectiveDateRange_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveDateRange_nsprefix_) else ''
            self.EffectiveDateRange.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EffectiveDateRange', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to read."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element into the matching field, validating enums."""
        if nodeName_ == 'Id':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Id')
            value_ = self.gds_validate_string(value_, node, 'Id')
            self.Id = value_
            self.Id_nsprefix_ = child_.prefix
        elif nodeName_ == 'Permission':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Permission')
            value_ = self.gds_validate_string(value_, node, 'Permission')
            self.Permission = value_
            self.Permission_nsprefix_ = child_.prefix
            # validate type EnterprisePermissionType
            self.validate_EnterprisePermissionType(self.Permission)
        elif nodeName_ == 'CarrierCode':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'CarrierCode')
            value_ = self.gds_validate_string(value_, node, 'CarrierCode')
            self.CarrierCode = value_
            self.CarrierCode_nsprefix_ = child_.prefix
            # validate type CarrierCodeType
            self.validate_CarrierCodeType(self.CarrierCode)
        elif nodeName_ == 'EffectiveDateRange':
            obj_ = DateRange.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.EffectiveDateRange = obj_
            obj_.original_tagname_ = 'EffectiveDateRange'
# end class EnterprisePrivilegeDetail
class SearchLocationsReply(GeneratedsSuper):
    """Generated binding for the ``SearchLocationsReply`` XML complex type.

    Top-level reply object: transaction metadata (severity, notifications,
    version), result counts, the formatted search address, and a list of
    address-to-location relationship details.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, HighestSeverity=None, Notifications=None, TransactionDetail=None, Version=None, TotalResultsAvailable=None, ResultsReturned=None, FormattedAddress=None, AddressToLocationRelationships=None, gds_collector_=None, **kwargs_):
        """Store the fields; list-valued fields default to fresh empty lists."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.HighestSeverity = HighestSeverity
        self.validate_NotificationSeverityType(self.HighestSeverity)
        self.HighestSeverity_nsprefix_ = None
        if Notifications is None:
            self.Notifications = []
        else:
            self.Notifications = Notifications
        self.Notifications_nsprefix_ = None
        self.TransactionDetail = TransactionDetail
        self.TransactionDetail_nsprefix_ = None
        self.Version = Version
        self.Version_nsprefix_ = None
        self.TotalResultsAvailable = TotalResultsAvailable
        self.TotalResultsAvailable_nsprefix_ = None
        self.ResultsReturned = ResultsReturned
        self.ResultsReturned_nsprefix_ = None
        self.FormattedAddress = FormattedAddress
        self.FormattedAddress_nsprefix_ = None
        if AddressToLocationRelationships is None:
            self.AddressToLocationRelationships = []
        else:
            self.AddressToLocationRelationships = AddressToLocationRelationships
        self.AddressToLocationRelationships_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create a SearchLocationsReply, honoring subclass overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SearchLocationsReply)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SearchLocationsReply.subclass:
            return SearchLocationsReply.subclass(*args_, **kwargs_)
        else:
            return SearchLocationsReply(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors: trivial get/set/list helpers for each field. ---
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_HighestSeverity(self):
        return self.HighestSeverity
    def set_HighestSeverity(self, HighestSeverity):
        self.HighestSeverity = HighestSeverity
    def get_Notifications(self):
        return self.Notifications
    def set_Notifications(self, Notifications):
        self.Notifications = Notifications
    def add_Notifications(self, value):
        self.Notifications.append(value)
    def insert_Notifications_at(self, index, value):
        self.Notifications.insert(index, value)
    def replace_Notifications_at(self, index, value):
        self.Notifications[index] = value
    def get_TransactionDetail(self):
        return self.TransactionDetail
    def set_TransactionDetail(self, TransactionDetail):
        self.TransactionDetail = TransactionDetail
    def get_Version(self):
        return self.Version
    def set_Version(self, Version):
        self.Version = Version
    def get_TotalResultsAvailable(self):
        return self.TotalResultsAvailable
    def set_TotalResultsAvailable(self, TotalResultsAvailable):
        self.TotalResultsAvailable = TotalResultsAvailable
    def get_ResultsReturned(self):
        return self.ResultsReturned
    def set_ResultsReturned(self, ResultsReturned):
        self.ResultsReturned = ResultsReturned
    def get_FormattedAddress(self):
        return self.FormattedAddress
    def set_FormattedAddress(self, FormattedAddress):
        self.FormattedAddress = FormattedAddress
    def get_AddressToLocationRelationships(self):
        return self.AddressToLocationRelationships
    def set_AddressToLocationRelationships(self, AddressToLocationRelationships):
        self.AddressToLocationRelationships = AddressToLocationRelationships
    def add_AddressToLocationRelationships(self, value):
        self.AddressToLocationRelationships.append(value)
    def insert_AddressToLocationRelationships_at(self, index, value):
        self.AddressToLocationRelationships.insert(index, value)
    def replace_AddressToLocationRelationships_at(self, index, value):
        self.AddressToLocationRelationships[index] = value
    def validate_NotificationSeverityType(self, value):
        """Check *value* against the NotificationSeverityType enumeration.

        Problems are reported via the attached gds_collector_; returns
        False on failure, True otherwise.
        """
        result = True
        # Validate type NotificationSeverityType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['ERROR', 'FAILURE', 'NOTE', 'SUCCESS', 'WARNING']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on NotificationSeverityType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Report whether any child element is populated."""
        if (
            self.HighestSeverity is not None or
            self.Notifications or
            self.TransactionDetail is not None or
            self.Version is not None or
            self.TotalResultsAvailable is not None or
            self.ResultsReturned is not None or
            self.FormattedAddress is not None or
            self.AddressToLocationRelationships
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SearchLocationsReply', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SearchLocationsReply')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'SearchLocationsReply':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SearchLocationsReply')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SearchLocationsReply', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SearchLocationsReply'):
        """No XML attributes are defined for this type; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SearchLocationsReply', fromsubclass_=False, pretty_print=True):
        """Write all populated children in schema order (counts as integers)."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.HighestSeverity is not None:
            namespaceprefix_ = self.HighestSeverity_nsprefix_ + ':' if (UseCapturedNS_ and self.HighestSeverity_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sHighestSeverity>%s</%sHighestSeverity>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.HighestSeverity), input_name='HighestSeverity')), namespaceprefix_ , eol_))
        for Notifications_ in self.Notifications:
            namespaceprefix_ = self.Notifications_nsprefix_ + ':' if (UseCapturedNS_ and self.Notifications_nsprefix_) else ''
            Notifications_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Notifications', pretty_print=pretty_print)
        if self.TransactionDetail is not None:
            namespaceprefix_ = self.TransactionDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.TransactionDetail_nsprefix_) else ''
            self.TransactionDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TransactionDetail', pretty_print=pretty_print)
        if self.Version is not None:
            namespaceprefix_ = self.Version_nsprefix_ + ':' if (UseCapturedNS_ and self.Version_nsprefix_) else ''
            self.Version.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Version', pretty_print=pretty_print)
        if self.TotalResultsAvailable is not None:
            namespaceprefix_ = self.TotalResultsAvailable_nsprefix_ + ':' if (UseCapturedNS_ and self.TotalResultsAvailable_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTotalResultsAvailable>%s</%sTotalResultsAvailable>%s' % (namespaceprefix_ , self.gds_format_integer(self.TotalResultsAvailable, input_name='TotalResultsAvailable'), namespaceprefix_ , eol_))
        if self.ResultsReturned is not None:
            namespaceprefix_ = self.ResultsReturned_nsprefix_ + ':' if (UseCapturedNS_ and self.ResultsReturned_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sResultsReturned>%s</%sResultsReturned>%s' % (namespaceprefix_ , self.gds_format_integer(self.ResultsReturned, input_name='ResultsReturned'), namespaceprefix_ , eol_))
        if self.FormattedAddress is not None:
            namespaceprefix_ = self.FormattedAddress_nsprefix_ + ':' if (UseCapturedNS_ and self.FormattedAddress_nsprefix_) else ''
            self.FormattedAddress.export(outfile, level, namespaceprefix_, namespacedef_='', name_='FormattedAddress', pretty_print=pretty_print)
        for AddressToLocationRelationships_ in self.AddressToLocationRelationships:
            namespaceprefix_ = self.AddressToLocationRelationships_nsprefix_ + ':' if (UseCapturedNS_ and self.AddressToLocationRelationships_nsprefix_) else ''
            AddressToLocationRelationships_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AddressToLocationRelationships', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to read."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element into the matching field.

        Integer counts are required to be non-negative (xs:nonNegativeInteger);
        complex children are built through their generated factory classes.
        """
        if nodeName_ == 'HighestSeverity':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'HighestSeverity')
            value_ = self.gds_validate_string(value_, node, 'HighestSeverity')
            self.HighestSeverity = value_
            self.HighestSeverity_nsprefix_ = child_.prefix
            # validate type NotificationSeverityType
            self.validate_NotificationSeverityType(self.HighestSeverity)
        elif nodeName_ == 'Notifications':
            obj_ = Notification.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Notifications.append(obj_)
            obj_.original_tagname_ = 'Notifications'
        elif nodeName_ == 'TransactionDetail':
            obj_ = TransactionDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TransactionDetail = obj_
            obj_.original_tagname_ = 'TransactionDetail'
        elif nodeName_ == 'Version':
            obj_ = VersionId.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Version = obj_
            obj_.original_tagname_ = 'Version'
        elif nodeName_ == 'TotalResultsAvailable' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'TotalResultsAvailable')
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'TotalResultsAvailable')
            self.TotalResultsAvailable = ival_
            self.TotalResultsAvailable_nsprefix_ = child_.prefix
        elif nodeName_ == 'ResultsReturned' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'ResultsReturned')
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'ResultsReturned')
            self.ResultsReturned = ival_
            self.ResultsReturned_nsprefix_ = child_.prefix
        elif nodeName_ == 'FormattedAddress':
            obj_ = Address.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.FormattedAddress = obj_
            obj_.original_tagname_ = 'FormattedAddress'
        elif nodeName_ == 'AddressToLocationRelationships':
            obj_ = AddressToLocationRelationshipDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.AddressToLocationRelationships.append(obj_)
            obj_.original_tagname_ = 'AddressToLocationRelationships'
# end class SearchLocationsReply
class SearchLocationsRequest(GeneratedsSuper):
__hash__ = GeneratedsSuper.__hash__
subclass = None
superclass = None
    def __init__(self, WebAuthenticationDetail=None, ClientDetail=None, TransactionDetail=None, Version=None, EffectiveDate=None, LocationsSearchCriterion=None, ShipperAccountNumber=None, UniqueTrackingNumber=None, Address=None, PhoneNumber=None, GeographicCoordinates=None, MultipleMatchesAction=None, SortDetail=None, Constraints=None, gds_collector_=None, **kwargs_):
        """Store the request fields.

        EffectiveDate accepts either a date object or an ISO ``YYYY-MM-DD``
        string (strings are parsed to ``datetime.date``); enum-typed fields
        are validated immediately against their XSD enumerations.
        """
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.WebAuthenticationDetail = WebAuthenticationDetail
        self.WebAuthenticationDetail_nsprefix_ = None
        self.ClientDetail = ClientDetail
        self.ClientDetail_nsprefix_ = None
        self.TransactionDetail = TransactionDetail
        self.TransactionDetail_nsprefix_ = None
        self.Version = Version
        self.Version_nsprefix_ = None
        if isinstance(EffectiveDate, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(EffectiveDate, '%Y-%m-%d').date()
        else:
            initvalue_ = EffectiveDate
        self.EffectiveDate = initvalue_
        self.EffectiveDate_nsprefix_ = None
        self.LocationsSearchCriterion = LocationsSearchCriterion
        self.validate_LocationsSearchCriteriaType(self.LocationsSearchCriterion)
        self.LocationsSearchCriterion_nsprefix_ = None
        self.ShipperAccountNumber = ShipperAccountNumber
        self.ShipperAccountNumber_nsprefix_ = None
        self.UniqueTrackingNumber = UniqueTrackingNumber
        self.UniqueTrackingNumber_nsprefix_ = None
        self.Address = Address
        self.Address_nsprefix_ = None
        self.PhoneNumber = PhoneNumber
        self.PhoneNumber_nsprefix_ = None
        self.GeographicCoordinates = GeographicCoordinates
        self.GeographicCoordinates_nsprefix_ = None
        self.MultipleMatchesAction = MultipleMatchesAction
        self.validate_MultipleMatchesActionType(self.MultipleMatchesAction)
        self.MultipleMatchesAction_nsprefix_ = None
        self.SortDetail = SortDetail
        self.SortDetail_nsprefix_ = None
        self.Constraints = Constraints
        self.Constraints_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create a SearchLocationsRequest, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SearchLocationsRequest)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SearchLocationsRequest.subclass:
            return SearchLocationsRequest.subclass(*args_, **kwargs_)
        else:
            return SearchLocationsRequest(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors: trivial get/set pairs for each request field. ---
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_WebAuthenticationDetail(self):
        return self.WebAuthenticationDetail
    def set_WebAuthenticationDetail(self, WebAuthenticationDetail):
        self.WebAuthenticationDetail = WebAuthenticationDetail
    def get_ClientDetail(self):
        return self.ClientDetail
    def set_ClientDetail(self, ClientDetail):
        self.ClientDetail = ClientDetail
    def get_TransactionDetail(self):
        return self.TransactionDetail
    def set_TransactionDetail(self, TransactionDetail):
        self.TransactionDetail = TransactionDetail
    def get_Version(self):
        return self.Version
    def set_Version(self, Version):
        self.Version = Version
    def get_EffectiveDate(self):
        return self.EffectiveDate
    def set_EffectiveDate(self, EffectiveDate):
        self.EffectiveDate = EffectiveDate
    def get_LocationsSearchCriterion(self):
        return self.LocationsSearchCriterion
    def set_LocationsSearchCriterion(self, LocationsSearchCriterion):
        self.LocationsSearchCriterion = LocationsSearchCriterion
    def get_ShipperAccountNumber(self):
        return self.ShipperAccountNumber
    def set_ShipperAccountNumber(self, ShipperAccountNumber):
        self.ShipperAccountNumber = ShipperAccountNumber
    def get_UniqueTrackingNumber(self):
        return self.UniqueTrackingNumber
    def set_UniqueTrackingNumber(self, UniqueTrackingNumber):
        self.UniqueTrackingNumber = UniqueTrackingNumber
    def get_Address(self):
        return self.Address
    def set_Address(self, Address):
        self.Address = Address
    def get_PhoneNumber(self):
        return self.PhoneNumber
    def set_PhoneNumber(self, PhoneNumber):
        self.PhoneNumber = PhoneNumber
    def get_GeographicCoordinates(self):
        return self.GeographicCoordinates
    def set_GeographicCoordinates(self, GeographicCoordinates):
        self.GeographicCoordinates = GeographicCoordinates
    def get_MultipleMatchesAction(self):
        return self.MultipleMatchesAction
    def set_MultipleMatchesAction(self, MultipleMatchesAction):
        self.MultipleMatchesAction = MultipleMatchesAction
    def get_SortDetail(self):
        return self.SortDetail
    def set_SortDetail(self, SortDetail):
        self.SortDetail = SortDetail
    def get_Constraints(self):
        return self.Constraints
    def set_Constraints(self, Constraints):
        self.Constraints = Constraints
    def validate_LocationsSearchCriteriaType(self, value):
        """Check *value* against the LocationsSearchCriteriaType enumeration.

        Problems are reported via the attached gds_collector_; returns
        False on failure, True otherwise.
        """
        result = True
        # Validate type LocationsSearchCriteriaType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['ADDRESS', 'GEOGRAPHIC_COORDINATES', 'PHONE_NUMBER']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LocationsSearchCriteriaType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_MultipleMatchesActionType(self, value):
        """Check *value* against the MultipleMatchesActionType enumeration (see above)."""
        result = True
        # Validate type MultipleMatchesActionType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['RETURN_ALL', 'RETURN_ERROR', 'RETURN_FIRST']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on MultipleMatchesActionType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
def hasContent_(self):
if (
self.WebAuthenticationDetail is not None or
self.ClientDetail is not None or
self.TransactionDetail is not None or
self.Version is not None or
self.EffectiveDate is not None or
self.LocationsSearchCriterion is not None or
self.ShipperAccountNumber is not None or
self.UniqueTrackingNumber is not None or
self.Address is not None or
self.PhoneNumber is not None or
self.GeographicCoordinates is not None or
self.MultipleMatchesAction is not None or
self.SortDetail is not None or
self.Constraints is not None
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SearchLocationsRequest', pretty_print=True):
        """Write this object to *outfile* as an XML element.

        level: current indentation depth; namespaceprefix_/namespacedef_:
        namespace prefix and xmlns declaration text; name_: element tag
        name (replaced by the tag captured at parse time, when present).
        """
        # A registered namespace definition for this type overrides the
        # caller-supplied one.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SearchLocationsRequest')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the original tag name captured during parsing when the
        # caller passed the default name.
        if self.original_tagname_ is not None and name_ == 'SearchLocationsRequest':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SearchLocationsRequest')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SearchLocationsRequest', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No populated children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SearchLocationsRequest'):
pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SearchLocationsRequest', fromsubclass_=False, pretty_print=True):
        """Write each populated child element to *outfile*.

        Complex children delegate to their own export(); simple-typed
        children are formatted and XML-escaped inline.
        """
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.WebAuthenticationDetail is not None:
            namespaceprefix_ = self.WebAuthenticationDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.WebAuthenticationDetail_nsprefix_) else ''
            self.WebAuthenticationDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='WebAuthenticationDetail', pretty_print=pretty_print)
        if self.ClientDetail is not None:
            namespaceprefix_ = self.ClientDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.ClientDetail_nsprefix_) else ''
            self.ClientDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ClientDetail', pretty_print=pretty_print)
        if self.TransactionDetail is not None:
            namespaceprefix_ = self.TransactionDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.TransactionDetail_nsprefix_) else ''
            self.TransactionDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TransactionDetail', pretty_print=pretty_print)
        if self.Version is not None:
            namespaceprefix_ = self.Version_nsprefix_ + ':' if (UseCapturedNS_ and self.Version_nsprefix_) else ''
            self.Version.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Version', pretty_print=pretty_print)
        if self.EffectiveDate is not None:
            namespaceprefix_ = self.EffectiveDate_nsprefix_ + ':' if (UseCapturedNS_ and self.EffectiveDate_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sEffectiveDate>%s</%sEffectiveDate>%s' % (namespaceprefix_ , self.gds_format_date(self.EffectiveDate, input_name='EffectiveDate'), namespaceprefix_ , eol_))
        if self.LocationsSearchCriterion is not None:
            namespaceprefix_ = self.LocationsSearchCriterion_nsprefix_ + ':' if (UseCapturedNS_ and self.LocationsSearchCriterion_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLocationsSearchCriterion>%s</%sLocationsSearchCriterion>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.LocationsSearchCriterion), input_name='LocationsSearchCriterion')), namespaceprefix_ , eol_))
        if self.ShipperAccountNumber is not None:
            namespaceprefix_ = self.ShipperAccountNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipperAccountNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sShipperAccountNumber>%s</%sShipperAccountNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ShipperAccountNumber), input_name='ShipperAccountNumber')), namespaceprefix_ , eol_))
        if self.UniqueTrackingNumber is not None:
            namespaceprefix_ = self.UniqueTrackingNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.UniqueTrackingNumber_nsprefix_) else ''
            self.UniqueTrackingNumber.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UniqueTrackingNumber', pretty_print=pretty_print)
        if self.Address is not None:
            namespaceprefix_ = self.Address_nsprefix_ + ':' if (UseCapturedNS_ and self.Address_nsprefix_) else ''
            self.Address.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Address', pretty_print=pretty_print)
        if self.PhoneNumber is not None:
            namespaceprefix_ = self.PhoneNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.PhoneNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPhoneNumber>%s</%sPhoneNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PhoneNumber), input_name='PhoneNumber')), namespaceprefix_ , eol_))
        if self.GeographicCoordinates is not None:
            namespaceprefix_ = self.GeographicCoordinates_nsprefix_ + ':' if (UseCapturedNS_ and self.GeographicCoordinates_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sGeographicCoordinates>%s</%sGeographicCoordinates>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.GeographicCoordinates), input_name='GeographicCoordinates')), namespaceprefix_ , eol_))
        if self.MultipleMatchesAction is not None:
            namespaceprefix_ = self.MultipleMatchesAction_nsprefix_ + ':' if (UseCapturedNS_ and self.MultipleMatchesAction_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMultipleMatchesAction>%s</%sMultipleMatchesAction>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.MultipleMatchesAction), input_name='MultipleMatchesAction')), namespaceprefix_ , eol_))
        if self.SortDetail is not None:
            namespaceprefix_ = self.SortDetail_nsprefix_ + ':' if (UseCapturedNS_ and self.SortDetail_nsprefix_) else ''
            self.SortDetail.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SortDetail', pretty_print=pretty_print)
        if self.Constraints is not None:
            namespaceprefix_ = self.Constraints_nsprefix_ + ':' if (UseCapturedNS_ and self.Constraints_nsprefix_) else ''
            self.Constraints.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Constraints', pretty_print=pretty_print)
def build(self, node, gds_collector_=None):
self.gds_collector_ = gds_collector_
if SaveElementTreeNode:
self.gds_elementtree_node_ = node
already_processed = set()
self.ns_prefix_ = node.prefix
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one parsed child element *child_* (local tag name
        *nodeName_*) to the matching member.

        Complex children are built through their class factory (classes
        defined elsewhere in this module); simple-typed children are
        parsed/validated from the element text.
        """
        if nodeName_ == 'WebAuthenticationDetail':
            obj_ = WebAuthenticationDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.WebAuthenticationDetail = obj_
            obj_.original_tagname_ = 'WebAuthenticationDetail'
        elif nodeName_ == 'ClientDetail':
            obj_ = ClientDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ClientDetail = obj_
            obj_.original_tagname_ = 'ClientDetail'
        elif nodeName_ == 'TransactionDetail':
            obj_ = TransactionDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TransactionDetail = obj_
            obj_.original_tagname_ = 'TransactionDetail'
        elif nodeName_ == 'Version':
            obj_ = VersionId.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Version = obj_
            obj_.original_tagname_ = 'Version'
        elif nodeName_ == 'EffectiveDate':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.EffectiveDate = dval_
            self.EffectiveDate_nsprefix_ = child_.prefix
        elif nodeName_ == 'LocationsSearchCriterion':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'LocationsSearchCriterion')
            value_ = self.gds_validate_string(value_, node, 'LocationsSearchCriterion')
            self.LocationsSearchCriterion = value_
            self.LocationsSearchCriterion_nsprefix_ = child_.prefix
            # validate type LocationsSearchCriteriaType
            self.validate_LocationsSearchCriteriaType(self.LocationsSearchCriterion)
        elif nodeName_ == 'ShipperAccountNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ShipperAccountNumber')
            value_ = self.gds_validate_string(value_, node, 'ShipperAccountNumber')
            self.ShipperAccountNumber = value_
            self.ShipperAccountNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'UniqueTrackingNumber':
            obj_ = UniqueTrackingNumber.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UniqueTrackingNumber = obj_
            obj_.original_tagname_ = 'UniqueTrackingNumber'
        elif nodeName_ == 'Address':
            obj_ = Address.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Address = obj_
            obj_.original_tagname_ = 'Address'
        elif nodeName_ == 'PhoneNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'PhoneNumber')
            value_ = self.gds_validate_string(value_, node, 'PhoneNumber')
            self.PhoneNumber = value_
            self.PhoneNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'GeographicCoordinates':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'GeographicCoordinates')
            value_ = self.gds_validate_string(value_, node, 'GeographicCoordinates')
            self.GeographicCoordinates = value_
            self.GeographicCoordinates_nsprefix_ = child_.prefix
        elif nodeName_ == 'MultipleMatchesAction':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'MultipleMatchesAction')
            value_ = self.gds_validate_string(value_, node, 'MultipleMatchesAction')
            self.MultipleMatchesAction = value_
            self.MultipleMatchesAction_nsprefix_ = child_.prefix
            # validate type MultipleMatchesActionType
            self.validate_MultipleMatchesActionType(self.MultipleMatchesAction)
        elif nodeName_ == 'SortDetail':
            obj_ = LocationSortDetail.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.SortDetail = obj_
            obj_.original_tagname_ = 'SortDetail'
        elif nodeName_ == 'Constraints':
            obj_ = SearchLocationConstraints.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Constraints = obj_
            obj_.original_tagname_ = 'Constraints'
# end class SearchLocationsRequest
class ShippingHoliday(GeneratedsSuper):
    """XML data binding for the ShippingHoliday complex type: a Holiday
    child plus a repeated UnavailableActions element restricted to the
    ShippingActionType enumeration (DELIVERIES, PICKUPS).

    Code follows the generateDS-style pattern used throughout this
    module: factory/build for parsing, export for serialization.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Holiday=None, UnavailableActions=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Holiday = Holiday
        self.Holiday_nsprefix_ = None
        # UnavailableActions is a repeated element; default to a fresh
        # list so instances never share one.
        if UnavailableActions is None:
            self.UnavailableActions = []
        else:
            self.UnavailableActions = UnavailableActions
        self.UnavailableActions_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate ShippingHoliday, honoring a registered subclass
        module or the subclass hook before falling back to this class."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ShippingHoliday)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ShippingHoliday.subclass:
            return ShippingHoliday.subclass(*args_, **kwargs_)
        else:
            return ShippingHoliday(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Holiday(self):
        return self.Holiday
    def set_Holiday(self, Holiday):
        self.Holiday = Holiday
    def get_UnavailableActions(self):
        return self.UnavailableActions
    def set_UnavailableActions(self, UnavailableActions):
        self.UnavailableActions = UnavailableActions
    def add_UnavailableActions(self, value):
        self.UnavailableActions.append(value)
    def insert_UnavailableActions_at(self, index, value):
        self.UnavailableActions.insert(index, value)
    def replace_UnavailableActions_at(self, index, value):
        self.UnavailableActions[index] = value
    def validate_ShippingActionType(self, value):
        """Validate *value* against the ShippingActionType enumeration;
        collect a message and return False on failure."""
        result = True
        # Validate type ShippingActionType, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['DELIVERIES', 'PICKUPS']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ShippingActionType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when any child element is populated."""
        if (
            self.Holiday is not None or
            self.UnavailableActions
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingHoliday', pretty_print=True):
        """Write this object to *outfile* as an XML element; emits a
        self-closing tag when no children are populated."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ShippingHoliday')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured at parse time when exporting the
        # default element name.
        if self.original_tagname_ is not None and name_ == 'ShippingHoliday':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ShippingHoliday')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ShippingHoliday', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ShippingHoliday'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ShippingHoliday', fromsubclass_=False, pretty_print=True):
        """Write the Holiday child (delegated) and each UnavailableActions
        value (inline, escaped) to *outfile*."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Holiday is not None:
            namespaceprefix_ = self.Holiday_nsprefix_ + ':' if (UseCapturedNS_ and self.Holiday_nsprefix_) else ''
            self.Holiday.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Holiday', pretty_print=pretty_print)
        for UnavailableActions_ in self.UnavailableActions:
            namespaceprefix_ = self.UnavailableActions_nsprefix_ + ':' if (UseCapturedNS_ and self.UnavailableActions_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sUnavailableActions>%s</%sUnavailableActions>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(UnavailableActions_), input_name='UnavailableActions')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from ElementTree element *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one parsed child element to the matching member."""
        if nodeName_ == 'Holiday':
            obj_ = Holiday.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Holiday = obj_
            obj_.original_tagname_ = 'Holiday'
        elif nodeName_ == 'UnavailableActions':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'UnavailableActions')
            value_ = self.gds_validate_string(value_, node, 'UnavailableActions')
            self.UnavailableActions.append(value_)
            self.UnavailableActions_nsprefix_ = child_.prefix
            # validate type ShippingActionType
            self.validate_ShippingActionType(self.UnavailableActions[-1])
# end class ShippingHoliday
class TimeRange(GeneratedsSuper):
    """XML data binding for the TimeRange complex type: a Begins/Ends
    pair of xs:time values.

    String constructor arguments are parsed with the 'HH:MM:SS' format
    into datetime.time objects; datetime.time values pass through.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Begins=None, Ends=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # Accept either a time object or an 'HH:MM:SS' string.
        if isinstance(Begins, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Begins, '%H:%M:%S').time()
        else:
            initvalue_ = Begins
        self.Begins = initvalue_
        self.Begins_nsprefix_ = None
        if isinstance(Ends, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Ends, '%H:%M:%S').time()
        else:
            initvalue_ = Ends
        self.Ends = initvalue_
        self.Ends_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate TimeRange, honoring a registered subclass module
        or the subclass hook before falling back to this class."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TimeRange)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TimeRange.subclass:
            return TimeRange.subclass(*args_, **kwargs_)
        else:
            return TimeRange(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Begins(self):
        return self.Begins
    def set_Begins(self, Begins):
        self.Begins = Begins
    def get_Ends(self):
        return self.Ends
    def set_Ends(self, Ends):
        self.Ends = Ends
    def hasContent_(self):
        """Return True when any child element is populated."""
        if (
            self.Begins is not None or
            self.Ends is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimeRange', pretty_print=True):
        """Write this object to *outfile* as an XML element; emits a
        self-closing tag when no children are populated."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TimeRange')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured at parse time when exporting the
        # default element name.
        if self.original_tagname_ is not None and name_ == 'TimeRange':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimeRange')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimeRange', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TimeRange'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimeRange', fromsubclass_=False, pretty_print=True):
        """Write the Begins/Ends children as formatted xs:time text."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Begins is not None:
            namespaceprefix_ = self.Begins_nsprefix_ + ':' if (UseCapturedNS_ and self.Begins_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBegins>%s</%sBegins>%s' % (namespaceprefix_ , self.gds_format_time(self.Begins, input_name='Begins'), namespaceprefix_ , eol_))
        if self.Ends is not None:
            namespaceprefix_ = self.Ends_nsprefix_ + ':' if (UseCapturedNS_ and self.Ends_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sEnds>%s</%sEnds>%s' % (namespaceprefix_ , self.gds_format_time(self.Ends, input_name='Ends'), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from ElementTree element *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one parsed child element to the matching member,
        parsing the element text as a time value."""
        if nodeName_ == 'Begins':
            sval_ = child_.text
            dval_ = self.gds_parse_time(sval_)
            self.Begins = dval_
            self.Begins_nsprefix_ = child_.prefix
        elif nodeName_ == 'Ends':
            sval_ = child_.text
            dval_ = self.gds_parse_time(sval_)
            self.Ends = dval_
            self.Ends_nsprefix_ = child_.prefix
# end class TimeRange
class TransactionDetail(GeneratedsSuper):
    """XML data binding for the TransactionDetail complex type: a
    CustomerTransactionId string plus an optional Localization child.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, CustomerTransactionId=None, Localization=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.CustomerTransactionId = CustomerTransactionId
        self.CustomerTransactionId_nsprefix_ = None
        self.Localization = Localization
        self.Localization_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate TransactionDetail, honoring a registered subclass
        module or the subclass hook before falling back to this class."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TransactionDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TransactionDetail.subclass:
            return TransactionDetail.subclass(*args_, **kwargs_)
        else:
            return TransactionDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_CustomerTransactionId(self):
        return self.CustomerTransactionId
    def set_CustomerTransactionId(self, CustomerTransactionId):
        self.CustomerTransactionId = CustomerTransactionId
    def get_Localization(self):
        return self.Localization
    def set_Localization(self, Localization):
        self.Localization = Localization
    def hasContent_(self):
        """Return True when any child element is populated."""
        if (
            self.CustomerTransactionId is not None or
            self.Localization is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransactionDetail', pretty_print=True):
        """Write this object to *outfile* as an XML element; emits a
        self-closing tag when no children are populated."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransactionDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured at parse time when exporting the
        # default element name.
        if self.original_tagname_ is not None and name_ == 'TransactionDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransactionDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TransactionDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TransactionDetail'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransactionDetail', fromsubclass_=False, pretty_print=True):
        """Write the CustomerTransactionId (inline, escaped) and the
        Localization child (delegated) to *outfile*."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CustomerTransactionId is not None:
            namespaceprefix_ = self.CustomerTransactionId_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomerTransactionId_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCustomerTransactionId>%s</%sCustomerTransactionId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CustomerTransactionId), input_name='CustomerTransactionId')), namespaceprefix_ , eol_))
        if self.Localization is not None:
            namespaceprefix_ = self.Localization_nsprefix_ + ':' if (UseCapturedNS_ and self.Localization_nsprefix_) else ''
            self.Localization.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Localization', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from ElementTree element *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one parsed child element to the matching member."""
        if nodeName_ == 'CustomerTransactionId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'CustomerTransactionId')
            value_ = self.gds_validate_string(value_, node, 'CustomerTransactionId')
            self.CustomerTransactionId = value_
            self.CustomerTransactionId_nsprefix_ = child_.prefix
        elif nodeName_ == 'Localization':
            obj_ = Localization.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Localization = obj_
            obj_.original_tagname_ = 'Localization'
# end class TransactionDetail
class UniqueTrackingNumber(GeneratedsSuper):
    """XML data binding for the UniqueTrackingNumber complex type:
    TrackingNumber and TrackingNumberUniqueIdentifier strings plus a
    ShipDate (xs:date).

    A string ShipDate constructor argument is parsed with the
    '%Y-%m-%d' format into a datetime.date; date objects pass through.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, TrackingNumber=None, TrackingNumberUniqueIdentifier=None, ShipDate=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.TrackingNumber = TrackingNumber
        self.TrackingNumber_nsprefix_ = None
        self.TrackingNumberUniqueIdentifier = TrackingNumberUniqueIdentifier
        self.TrackingNumberUniqueIdentifier_nsprefix_ = None
        # Accept either a date object or a 'YYYY-MM-DD' string.
        if isinstance(ShipDate, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(ShipDate, '%Y-%m-%d').date()
        else:
            initvalue_ = ShipDate
        self.ShipDate = initvalue_
        self.ShipDate_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate UniqueTrackingNumber, honoring a registered
        subclass module or the subclass hook before falling back to this
        class."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, UniqueTrackingNumber)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if UniqueTrackingNumber.subclass:
            return UniqueTrackingNumber.subclass(*args_, **kwargs_)
        else:
            return UniqueTrackingNumber(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_TrackingNumber(self):
        return self.TrackingNumber
    def set_TrackingNumber(self, TrackingNumber):
        self.TrackingNumber = TrackingNumber
    def get_TrackingNumberUniqueIdentifier(self):
        return self.TrackingNumberUniqueIdentifier
    def set_TrackingNumberUniqueIdentifier(self, TrackingNumberUniqueIdentifier):
        self.TrackingNumberUniqueIdentifier = TrackingNumberUniqueIdentifier
    def get_ShipDate(self):
        return self.ShipDate
    def set_ShipDate(self, ShipDate):
        self.ShipDate = ShipDate
    def hasContent_(self):
        """Return True when any child element is populated."""
        if (
            self.TrackingNumber is not None or
            self.TrackingNumberUniqueIdentifier is not None or
            self.ShipDate is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='UniqueTrackingNumber', pretty_print=True):
        """Write this object to *outfile* as an XML element; emits a
        self-closing tag when no children are populated."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('UniqueTrackingNumber')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name captured at parse time when exporting the
        # default element name.
        if self.original_tagname_ is not None and name_ == 'UniqueTrackingNumber':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UniqueTrackingNumber')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UniqueTrackingNumber', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UniqueTrackingNumber'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='UniqueTrackingNumber', fromsubclass_=False, pretty_print=True):
        """Write each populated child as formatted, escaped text."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.TrackingNumber is not None:
            namespaceprefix_ = self.TrackingNumber_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackingNumber_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTrackingNumber>%s</%sTrackingNumber>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackingNumber), input_name='TrackingNumber')), namespaceprefix_ , eol_))
        if self.TrackingNumberUniqueIdentifier is not None:
            namespaceprefix_ = self.TrackingNumberUniqueIdentifier_nsprefix_ + ':' if (UseCapturedNS_ and self.TrackingNumberUniqueIdentifier_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTrackingNumberUniqueIdentifier>%s</%sTrackingNumberUniqueIdentifier>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TrackingNumberUniqueIdentifier), input_name='TrackingNumberUniqueIdentifier')), namespaceprefix_ , eol_))
        if self.ShipDate is not None:
            namespaceprefix_ = self.ShipDate_nsprefix_ + ':' if (UseCapturedNS_ and self.ShipDate_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sShipDate>%s</%sShipDate>%s' % (namespaceprefix_ , self.gds_format_date(self.ShipDate, input_name='ShipDate'), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from ElementTree element *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one parsed child element to the matching member;
        ShipDate text is parsed as a date."""
        if nodeName_ == 'TrackingNumber':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'TrackingNumber')
            value_ = self.gds_validate_string(value_, node, 'TrackingNumber')
            self.TrackingNumber = value_
            self.TrackingNumber_nsprefix_ = child_.prefix
        elif nodeName_ == 'TrackingNumberUniqueIdentifier':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'TrackingNumberUniqueIdentifier')
            value_ = self.gds_validate_string(value_, node, 'TrackingNumberUniqueIdentifier')
            self.TrackingNumberUniqueIdentifier = value_
            self.TrackingNumberUniqueIdentifier_nsprefix_ = child_.prefix
        elif nodeName_ == 'ShipDate':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.ShipDate = dval_
            self.ShipDate_nsprefix_ = child_.prefix
# end class UniqueTrackingNumber
class Weight(GeneratedsSuper):
    """The descriptive data for the heaviness of an object.

    Holds a unit-of-measure code (``Units``, restricted by the schema to
    'KG' or 'LB') and a decimal magnitude (``Value``), plus the usual
    generateDS build/export plumbing for XML (de)serialization.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Units=None, Value=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Units = Units
        self.validate_WeightUnits(self.Units)
        self.Units_nsprefix_ = None
        self.Value = Value
        self.Value_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate Weight or a registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Weight)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Weight.subclass:
            return Weight.subclass(*args_, **kwargs_)
        else:
            return Weight(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Units(self):
        return self.Units
    def set_Units(self, Units):
        self.Units = Units
    def get_Value(self):
        return self.Value
    def set_Value(self, Value):
        self.Value = Value
    def validate_WeightUnits(self, value):
        """Validate *value* against the WeightUnits xs:string restriction.

        Only reports problems (via the collector) when validation is
        enabled and a collector is attached; returns False when the base
        type check fails, otherwise True/False per the enumeration check.
        """
        result = True
        # Validate type WeightUnits, a restriction on xs:string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['KG', 'LB']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on WeightUnits' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True if any child element is set."""
        if (
            self.Units is not None or
            self.Value is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Weight', pretty_print=True):
        """Write this element (and its children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Weight')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'Weight':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Weight')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Weight', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Weight'):
        # No XML attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Weight', fromsubclass_=False, pretty_print=True):
        """Write the Units and Value child elements when present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Units is not None:
            namespaceprefix_ = self.Units_nsprefix_ + ':' if (UseCapturedNS_ and self.Units_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sUnits>%s</%sUnits>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Units), input_name='Units')), namespaceprefix_ , eol_))
        if self.Value is not None:
            namespaceprefix_ = self.Value_nsprefix_ + ':' if (UseCapturedNS_ and self.Value_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sValue>%s</%sValue>%s' % (namespaceprefix_ , self.gds_format_decimal(self.Value, input_name='Value'), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this instance from an etree node; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element into the matching field (Units or Value)."""
        if nodeName_ == 'Units':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Units')
            value_ = self.gds_validate_string(value_, node, 'Units')
            self.Units = value_
            self.Units_nsprefix_ = child_.prefix
            # validate type WeightUnits
            self.validate_WeightUnits(self.Units)
        elif nodeName_ == 'Value' and child_.text:
            sval_ = child_.text
            fval_ = self.gds_parse_decimal(sval_, node, 'Value')
            fval_ = self.gds_validate_decimal(fval_, node, 'Value')
            self.Value = fval_
            self.Value_nsprefix_ = child_.prefix
# end class Weight
class WebAuthenticationDetail(GeneratedsSuper):
    """Used in authentication of the sender's identity.

    Carries two optional WebAuthenticationCredential children:
    ``ParentCredential`` and ``UserCredential``.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, ParentCredential=None, UserCredential=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.ParentCredential = ParentCredential
        self.ParentCredential_nsprefix_ = None
        self.UserCredential = UserCredential
        self.UserCredential_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate WebAuthenticationDetail or a registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, WebAuthenticationDetail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if WebAuthenticationDetail.subclass:
            return WebAuthenticationDetail.subclass(*args_, **kwargs_)
        else:
            return WebAuthenticationDetail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_ParentCredential(self):
        return self.ParentCredential
    def set_ParentCredential(self, ParentCredential):
        self.ParentCredential = ParentCredential
    def get_UserCredential(self):
        return self.UserCredential
    def set_UserCredential(self, UserCredential):
        self.UserCredential = UserCredential
    def hasContent_(self):
        """Return True if any child element is set."""
        if (
            self.ParentCredential is not None or
            self.UserCredential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='WebAuthenticationDetail', pretty_print=True):
        """Write this element (and its children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('WebAuthenticationDetail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'WebAuthenticationDetail':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='WebAuthenticationDetail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='WebAuthenticationDetail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='WebAuthenticationDetail'):
        # No XML attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='WebAuthenticationDetail', fromsubclass_=False, pretty_print=True):
        """Delegate export of each credential child to its own export()."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ParentCredential is not None:
            namespaceprefix_ = self.ParentCredential_nsprefix_ + ':' if (UseCapturedNS_ and self.ParentCredential_nsprefix_) else ''
            self.ParentCredential.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ParentCredential', pretty_print=pretty_print)
        if self.UserCredential is not None:
            namespaceprefix_ = self.UserCredential_nsprefix_ + ':' if (UseCapturedNS_ and self.UserCredential_nsprefix_) else ''
            self.UserCredential.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserCredential', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an etree node; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Build a nested WebAuthenticationCredential for either child tag."""
        if nodeName_ == 'ParentCredential':
            obj_ = WebAuthenticationCredential.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ParentCredential = obj_
            obj_.original_tagname_ = 'ParentCredential'
        elif nodeName_ == 'UserCredential':
            obj_ = WebAuthenticationCredential.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserCredential = obj_
            obj_.original_tagname_ = 'UserCredential'
# end class WebAuthenticationDetail
class WebAuthenticationCredential(GeneratedsSuper):
    """Two part authentication string used for the sender's identity.

    Holds a ``Key`` and a ``Password`` string.  Note that export()
    writes both values into the XML output in plain text.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, Key=None, Password=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.Key = Key
        self.Key_nsprefix_ = None
        self.Password = Password
        self.Password_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate WebAuthenticationCredential or a registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, WebAuthenticationCredential)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if WebAuthenticationCredential.subclass:
            return WebAuthenticationCredential.subclass(*args_, **kwargs_)
        else:
            return WebAuthenticationCredential(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_Password(self):
        return self.Password
    def set_Password(self, Password):
        self.Password = Password
    def hasContent_(self):
        """Return True if any child element is set."""
        if (
            self.Key is not None or
            self.Password is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='WebAuthenticationCredential', pretty_print=True):
        """Write this element (and its children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('WebAuthenticationCredential')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'WebAuthenticationCredential':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='WebAuthenticationCredential')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='WebAuthenticationCredential', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='WebAuthenticationCredential'):
        # No XML attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='WebAuthenticationCredential', fromsubclass_=False, pretty_print=True):
        """Write the Key and Password child elements (plain text) when present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Key is not None:
            namespaceprefix_ = self.Key_nsprefix_ + ':' if (UseCapturedNS_ and self.Key_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sKey>%s</%sKey>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')), namespaceprefix_ , eol_))
        if self.Password is not None:
            namespaceprefix_ = self.Password_nsprefix_ + ':' if (UseCapturedNS_ and self.Password_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPassword>%s</%sPassword>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Password), input_name='Password')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this instance from an etree node; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element into the matching field (Key or Password)."""
        if nodeName_ == 'Key':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Key')
            value_ = self.gds_validate_string(value_, node, 'Key')
            self.Key = value_
            self.Key_nsprefix_ = child_.prefix
        elif nodeName_ == 'Password':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Password')
            value_ = self.gds_validate_string(value_, node, 'Password')
            self.Password = value_
            self.Password_nsprefix_ = child_.prefix
# end class WebAuthenticationCredential
class VersionId(GeneratedsSuper):
    """Identifies the version/level of a service operation expected by a caller
    (in each request) and performed by the callee (in each reply).

    Fields: ``ServiceId`` (string) plus ``Major``/``Intermediate``/``Minor``
    integer version components.
    """
    __hash__ = GeneratedsSuper.__hash__
    subclass = None
    superclass = None
    def __init__(self, ServiceId=None, Major=None, Intermediate=None, Minor=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.ServiceId = ServiceId
        self.ServiceId_nsprefix_ = None
        self.Major = Major
        self.Major_nsprefix_ = None
        self.Intermediate = Intermediate
        self.Intermediate_nsprefix_ = None
        self.Minor = Minor
        self.Minor_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate VersionId or a registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, VersionId)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if VersionId.subclass:
            return VersionId.subclass(*args_, **kwargs_)
        else:
            return VersionId(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_ServiceId(self):
        return self.ServiceId
    def set_ServiceId(self, ServiceId):
        self.ServiceId = ServiceId
    def get_Major(self):
        return self.Major
    def set_Major(self, Major):
        self.Major = Major
    def get_Intermediate(self):
        return self.Intermediate
    def set_Intermediate(self, Intermediate):
        self.Intermediate = Intermediate
    def get_Minor(self):
        return self.Minor
    def set_Minor(self, Minor):
        self.Minor = Minor
    def hasContent_(self):
        """Return True if any child element is set."""
        if (
            self.ServiceId is not None or
            self.Major is not None or
            self.Intermediate is not None or
            self.Minor is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VersionId', pretty_print=True):
        """Write this element (and its children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('VersionId')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'VersionId':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VersionId')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VersionId', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VersionId'):
        # No XML attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VersionId', fromsubclass_=False, pretty_print=True):
        """Write the ServiceId (string) and version-number (integer) children."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ServiceId is not None:
            namespaceprefix_ = self.ServiceId_nsprefix_ + ':' if (UseCapturedNS_ and self.ServiceId_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sServiceId>%s</%sServiceId>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ServiceId), input_name='ServiceId')), namespaceprefix_ , eol_))
        if self.Major is not None:
            namespaceprefix_ = self.Major_nsprefix_ + ':' if (UseCapturedNS_ and self.Major_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMajor>%s</%sMajor>%s' % (namespaceprefix_ , self.gds_format_integer(self.Major, input_name='Major'), namespaceprefix_ , eol_))
        if self.Intermediate is not None:
            namespaceprefix_ = self.Intermediate_nsprefix_ + ':' if (UseCapturedNS_ and self.Intermediate_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sIntermediate>%s</%sIntermediate>%s' % (namespaceprefix_ , self.gds_format_integer(self.Intermediate, input_name='Intermediate'), namespaceprefix_ , eol_))
        if self.Minor is not None:
            namespaceprefix_ = self.Minor_nsprefix_ + ':' if (UseCapturedNS_ and self.Minor_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMinor>%s</%sMinor>%s' % (namespaceprefix_ , self.gds_format_integer(self.Minor, input_name='Minor'), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this instance from an etree node; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element into the matching field.

        ServiceId is a string; Major/Intermediate/Minor are integers and
        are only parsed when the element has non-empty text.
        """
        if nodeName_ == 'ServiceId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ServiceId')
            value_ = self.gds_validate_string(value_, node, 'ServiceId')
            self.ServiceId = value_
            self.ServiceId_nsprefix_ = child_.prefix
        elif nodeName_ == 'Major' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'Major')
            ival_ = self.gds_validate_integer(ival_, node, 'Major')
            self.Major = ival_
            self.Major_nsprefix_ = child_.prefix
        elif nodeName_ == 'Intermediate' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'Intermediate')
            ival_ = self.gds_validate_integer(ival_, node, 'Intermediate')
            self.Intermediate = ival_
            self.Intermediate_nsprefix_ = child_.prefix
        elif nodeName_ == 'Minor' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'Minor')
            ival_ = self.gds_validate_integer(ival_, node, 'Minor')
            self.Minor = ival_
            self.Minor_nsprefix_ = child_.prefix
# end class VersionId
# Maps a root XML element tag to the generated class that parses it;
# consulted by get_root_tag() before falling back to module globals.
GDSClassesMapping = {
    'SearchLocationsReply': SearchLocationsReply,
    'SearchLocationsRequest': SearchLocationsRequest,
    'ValidateLocationAvailabilityRequest': ValidateLocationAvailabilityRequest,
}
# Help text printed by usage() when the command line is malformed.
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    """Print the command-line usage message and exit with status 1."""
    sys.stdout.write('%s\n' % (USAGE_TEXT, ))
    sys.exit(1)
def get_root_tag(node):
    """Return ``(tag_name, generated_class)`` for the document root *node*.

    The class is looked up first in GDSClassesMapping, then among this
    module's globals; it is ``None`` when the tag is unknown.
    """
    root_tag = Tag_pattern_.match(node.tag).groups()[-1]
    root_class = GDSClassesMapping.get(root_tag)
    if root_class is None:
        root_class = globals().get(root_tag)
    return root_tag, root_class
def get_required_ns_prefix_defs(rootNode):
    """Collect every namespace prefix definition used in this XML doc.

    Walks the whole tree under *rootNode*, skipping the default (prefix
    ``None``) namespace.  Returns a ``(dict, str)`` pair: the prefix->URI
    mapping and a space-separated ``xmlns:prefix="uri"`` string.
    """
    prefix_map = {}
    for element in rootNode.iter():
        for prefix, uri in element.nsmap.items():
            # Later occurrences overwrite earlier ones, as in the
            # original comprehension-based implementation.
            if prefix is not None:
                prefix_map[prefix] = uri
    namespacedefs = ' '.join(
        'xmlns:{}="{}"'.format(prefix, uri)
        for prefix, uri in prefix_map.items()
    )
    return prefix_map, namespacedefs
def parse(inFileName, silence=False, print_warnings=True):
    """Parse the XML file *inFileName* and build the object tree.

    Unless *silence* is True, the tree is re-exported to stdout.  Any
    validation warnings collected during the build are written to
    stderr when *print_warnings* is True.  Returns the root object.
    """
    global CapturedNsmap_
    gds_collector = GdsCollector_()
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to the schema's primary reply type.
        rootTag = 'SearchLocationsReply'
        rootClass = SearchLocationsReply
    rootObj = rootClass.factory()
    rootObj.build(rootNode, gds_collector_=gds_collector)
    CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode)
    if not SaveElementTreeNode:
        # Drop DOM references so Python can reclaim the parsed tree.
        doc = None
        rootNode = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_=namespacedefs,
            pretty_print=True)
    if print_warnings and len(gds_collector.get_messages()) > 0:
        separator = ('-' * 50) + '\n'
        sys.stderr.write(separator)
        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
            len(gds_collector.get_messages()), ))
        gds_collector.write_messages(sys.stderr)
        sys.stderr.write(separator)
    return rootObj
def parseEtree(inFileName, silence=False, print_warnings=True):
    """Parse *inFileName* and build both the object tree and an lxml etree.

    Returns ``(rootObj, rootElement, mapping, reverse_mapping)`` where
    *mapping* links generated objects to their etree elements and
    *reverse_mapping* is its inverse.  Unless *silence* is True, the
    etree is serialized to stdout.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    gds_collector = GdsCollector_()
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to the schema's primary reply type.
        rootTag = 'SearchLocationsReply'
        rootClass = SearchLocationsReply
    rootObj = rootClass.factory()
    rootObj.build(rootNode, gds_collector_=gds_collector)
    # Enable Python to collect the space used by the DOM.
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not SaveElementTreeNode:
        doc = None
        rootNode = None
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        # tostring() returns bytes when an explicit encoding is given;
        # decode before writing so we emit XML rather than the b'...' repr
        # that str(bytes) would produce.
        sys.stdout.write(content.decode("utf-8"))
        sys.stdout.write('\n')
    if print_warnings and len(gds_collector.get_messages()) > 0:
        separator = ('-' * 50) + '\n'
        sys.stderr.write(separator)
        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
            len(gds_collector.get_messages()), ))
        gds_collector.write_messages(sys.stderr)
        sys.stderr.write(separator)
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False, print_warnings=True):
    '''Parse a string, create the object tree, and export it.
    Arguments:
    - inString -- A string.  This XML fragment should not start
      with an XML declaration containing an encoding.
    - silence -- A boolean.  If False, export the object.
    Returns -- The root object in the tree.
    '''
    parser = None
    rootNode = parsexmlstring_(inString, parser)
    gds_collector = GdsCollector_()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to the schema's primary reply type.
        rootTag = 'SearchLocationsReply'
        rootClass = SearchLocationsReply
    rootObj = rootClass.factory()
    rootObj.build(rootNode, gds_collector_=gds_collector)
    if not SaveElementTreeNode:
        rootNode = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:ns="http://fedex.com/ws/locs/v11"')
    if print_warnings:
        messages = gds_collector.get_messages()
        if messages:
            separator = ('-' * 50) + '\n'
            sys.stderr.write(separator)
            sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
                len(messages), ))
            gds_collector.write_messages(sys.stderr)
            sys.stderr.write(separator)
    return rootObj
def parseLiteral(inFileName, silence=False, print_warnings=True):
    """Parse *inFileName* and, unless *silence*, emit the tree as Python
    literal source (via exportLiteral) to stdout.  Returns the root object.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    gds_collector = GdsCollector_()
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to the schema's primary reply type.
        rootTag = 'SearchLocationsReply'
        rootClass = SearchLocationsReply
    rootObj = rootClass.factory()
    rootObj.build(rootNode, gds_collector_=gds_collector)
    # Enable Python to collect the space used by the DOM.
    if not SaveElementTreeNode:
        doc = None
        rootNode = None
    if not silence:
        sys.stdout.write('#from location_service_v11 import *\n\n')
        sys.stdout.write('import location_service_v11 as model_\n\n')
        sys.stdout.write('rootObj = model_.rootClass(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    if print_warnings and len(gds_collector.get_messages()) > 0:
        separator = ('-' * 50) + '\n'
        sys.stderr.write(separator)
        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
            len(gds_collector.get_messages()), ))
        gds_collector.write_messages(sys.stderr)
        sys.stderr.write(separator)
    return rootObj
def main():
    """Command-line entry point: parse the single XML-file argument."""
    arguments = sys.argv[1:]
    if len(arguments) != 1:
        usage()  # prints help and exits with status 1
    parse(arguments[0])
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Hook for renaming generated classes; empty for this schema.
RenameMappings_ = {
}
# Public API of this generated binding module.
__all__ = [
    "Address",
    "AddressAncillaryDetail",
    "AddressToLocationRelationshipDetail",
    "CarrierDetail",
    "ClearanceCountryDetail",
    "ClearanceLocationDetail",
    "ClientDetail",
    "Contact",
    "DateRange",
    "Dimensions",
    "Distance",
    "DistanceAndLocationDetail",
    "EnterprisePrivilegeDetail",
    "Holiday",
    "LatestDropOffDetail",
    "LatestDropoffOverlayDetail",
    "Localization",
    "LocationCapabilityDetail",
    "LocationContactAndAddress",
    "LocationDetail",
    "LocationHours",
    "LocationIdentificationDetail",
    "LocationPackageLimitsDetail",
    "LocationSortDetail",
    "LocationSupportedPackageDetail",
    "LocationSupportedShipmentDetail",
    "Notification",
    "NotificationParameter",
    "ReservationAvailabilityDetail",
    "RestrictionsAndPrivilegesPolicyDetail",
    "SearchLocationConstraints",
    "SearchLocationsReply",
    "SearchLocationsRequest",
    "ShippingHoliday",
    "TimeRange",
    "TransactionDetail",
    "UniqueTrackingNumber",
    "ValidateLocationAvailabilityRequest",
    "VersionId",
    "WebAuthenticationCredential",
    "WebAuthenticationDetail",
    "Weight"
]
| 51.282941 | 513 | 0.664514 | 431,739 | 0.961266 | 0 | 0 | 4,040 | 0.008995 | 0 | 0 | 54,530 | 0.121411 |
47179f5bf12e9b03efc4a0874bcc2fffe620b647 | 347 | py | Python | context_cache/context_cache.py | tervay/the-blue-alliance | e14c15cb04b455f90a2fcfdf4c1cdbf8454e17f8 | [
"MIT"
] | 266 | 2015-01-04T00:10:48.000Z | 2022-03-28T18:42:05.000Z | context_cache/context_cache.py | gregmarra/the-blue-alliance | 5bedaf5c80b4623984760d3da3289640639112f9 | [
"MIT"
] | 2,673 | 2015-01-01T20:14:33.000Z | 2022-03-31T18:17:16.000Z | context_cache/context_cache.py | gregmarra/the-blue-alliance | 5bedaf5c80b4623984760d3da3289640639112f9 | [
"MIT"
] | 230 | 2015-01-04T00:10:48.000Z | 2022-03-26T18:12:04.000Z | from google.appengine.ext import ndb
CACHE_DATA = {}
def get(cache_key):
full_cache_key = '{}:{}'.format(cache_key, ndb.get_context().__hash__())
return CACHE_DATA.get(full_cache_key, None)
def set(cache_key, value):
full_cache_key = '{}:{}'.format(cache_key, ndb.get_context().__hash__())
CACHE_DATA[full_cache_key] = value
| 23.133333 | 76 | 0.70317 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.040346 |
4717bd47c7c38c3762d1b1d17800ed74ea5afe27 | 3,762 | py | Python | pytma/Sentiment.py | aloidia-solutions/nlp-modelling | cc181bf08c5f93d6949d47bb5212dcefb336279d | [
"MIT"
] | 1 | 2019-07-25T21:18:10.000Z | 2019-07-25T21:18:10.000Z | pytma/Sentiment.py | brucebcampbell/nlp-modelling | b6e5b6c5b2df7cc20e7de68f32d164981353aaf9 | [
"MIT"
] | 13 | 2019-08-08T15:42:27.000Z | 2019-09-29T20:34:14.000Z | pytma/Sentiment.py | aloidia-solutions/nlp-modelling | cc181bf08c5f93d6949d47bb5212dcefb336279d | [
"MIT"
] | 1 | 2019-10-23T19:51:41.000Z | 2019-10-23T19:51:41.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import nltk
from nltk.tokenize import word_tokenize
from nltk.sentiment.vader import SentimentIntensityAnalyzer
from pytma.Utility import log
class Sentiment:
"""
Sentiment analyzer. Supervised via NaiveBayes or unsupervised via Valence method.
"""
def __init__(self):
"""
Initializes class. Downloads vader lexicon.
"""
nltk.download('vader_lexicon')
self.vader_polarity_scores = list()
def naiveBayesSentimentFit(self, text):
"""
Supervised sentiment analysis - uses nltk to fit Naive Bayes .
Call naiveBayesSentimentPredict to predict using the latest fit model.
:param text:
:return:
"""
dictionary = set(word.lower() for passage in text for word in word_tokenize(passage[0]))
self.nb_dict = dictionary
t = [({word: (word in word_tokenize(x[0])) for word in dictionary}, x[1]) for x in text]
classifier = nltk.NaiveBayesClassifier.train(t)
self.nb_classifier = classifier
def naiveBayesSentimentPredict(self,text):
"""
Predicts sentiment. Call naiveBayesSentimentFit on a corpus first otherwise
an error will be thrown.
:param text:
:return predicted sentiment:
"""
if hasattr( self,"nb_classifier") is False:
raise AttributeError
test_data_features = {word.lower(): (word in word_tokenize(text.lower())) for word in self.nb_dict}
result = self.nb_classifier.classify(test_data_features)
log.info("Sentiment NB predict : " + result)
return result
def valenceSentiment(self,text):
"""
Unsupervised sentiment analysis.
:param text:
:return sentiment polarity scores:
"""
sid = SentimentIntensityAnalyzer()
for sentence in text:
log.info(sentence)
ss = sid.polarity_scores(sentence)
for k in ss:
print(k)
print(ss[k])
log.info('Logging Sentiment : {0}: {1}, '.format(k, ss[k]))
self.vader_polarity_scores.append(k)
return self.vader_polarity_scores
if __name__ == '__main__':
#This will be the unit test
test_text_supervised = [("Great place to be when you are in Bangalore.", "pos"),
("The place was being renovated when I visited so the seating was limited.", "neg"),
("Loved the ambience, loved the food", "pos"),
("The food is delicious but not over the top.", "neg"),
("Service - Little slow, probably because too many people.", "neg"),
("The place is not easy to locate", "neg"),
("Mushroom fried rice was spicy", "pos"),
]
test_text_unsupervised = ["Great place to be when you are in Bangalore.",
"The place was being renovated when I visited so the seating was limited.",
"Loved the ambience, loved the food", "The food is delicious but not over the top.",
"Service - Little slow, probably because too many people.",
"The place is not easy to locate", "Mushroom fried rice was tasty"]
sent= Sentiment()
sent.naiveBayesSentimentFit(test_text_supervised)
test_data = "Manchurian was hot and spicy"
nb_sentiment =sent.naiveBayesSentimentPredict(test_data)
polarity_scores = sent.valenceSentiment(test_text_unsupervised)
log.info("Logging polarity scores "+ " ".join(polarity_scores ) )
log.info("done")
| 35.158879 | 114 | 0.598086 | 2,044 | 0.543328 | 0 | 0 | 0 | 0 | 0 | 0 | 1,597 | 0.424508 |
4717fca0de416a008ce00cf368fbdddc1489506d | 14,104 | py | Python | aiida_castep/calculations/tools.py | asamli/aiida-castep | 893113152460a632439c91652211381091566645 | [
"MIT"
] | 3 | 2021-09-02T16:02:47.000Z | 2021-12-17T22:38:20.000Z | aiida_castep/calculations/tools.py | asamli/aiida-castep | 893113152460a632439c91652211381091566645 | [
"MIT"
] | 16 | 2020-05-07T07:58:01.000Z | 2022-03-21T11:35:35.000Z | aiida_castep/calculations/tools.py | asamli/aiida-castep | 893113152460a632439c91652211381091566645 | [
"MIT"
] | 3 | 2020-05-25T13:05:51.000Z | 2021-12-17T22:39:12.000Z | """
Tools for calculations
"""
import warnings
from aiida.tools import CalculationTools
from aiida.common import InputValidationError
from aiida.orm import CalcJobNode, Dict
from aiida.common.links import LinkType
from aiida.plugins import DataFactory
from aiida.engine import CalcJob, ProcessBuilder
from aiida_castep.common import INPUT_LINKNAMES, OUTPUT_LINKNAMES
__all__ = [
'CastepCalcTools', 'create_restart', 'castep_input_summary',
'update_parameters', 'use_pseudos_from_family'
]
class CastepCalcTools(CalculationTools):
    """Calculation tools attached to CASTEP ``CalcJobNode``s (``self._node``)."""
    def get_castep_input_summary(self):
        """Return a summary dict of this calculation's inputs."""
        # Delegates to the module-level helper acting on the wrapped node.
        return castep_input_summary(self._node)
    def compare_with(self, the_other_calc, reverse=False):
        """
        Compare with another calculation
        Look for difference in get_castep_input_summary functions
        :params node: pk or uuid or node
        :params reverse: reverse the comparison, by default this node
        is the "new" and the one compared with is "old".
        """
        if isinstance(the_other_calc, (int, str)):
            # Accept a pk/uuid and resolve it to a node.
            from aiida.orm import load_node
            calc2 = load_node(the_other_calc)
        else:
            calc2 = the_other_calc
        from deepdiff import DeepDiff
        this_param = castep_input_summary(self._node)
        other_param = castep_input_summary(calc2)
        if reverse is True:
            res = DeepDiff(this_param, other_param)
        else:
            res = DeepDiff(other_param, this_param)
        return res
    def create_restart(self,
                       ignore_state=False,
                       restart_mode='restart',
                       use_output_structure=False,
                       **kwargs):
        """
        Return a ProcessBuilder for restarting this calculation.

        :param ignore_state: allow restarting even when exit_status != 0
        :param restart_mode: 'restart' or 'continuation'
        :param use_output_structure: seed the builder with the output structure
        :param kwargs: forwarded to the module-level ``create_restart``
        :raises RuntimeError: when exit_status != 0 and not ``ignore_state``
        """
        if self._node.exit_status != 0 and not ignore_state:
            raise RuntimeError(
                'exit_status is not 0. Set ignore_state to ignore')
        builder = create_restart(self._node.get_builder_restart(),
                                 calcjob=self._node,
                                 restart_mode=restart_mode,
                                 **kwargs)
        # Carry over the label
        builder.metadata.label = self._node.label
        if use_output_structure is True:
            builder[
                INPUT_LINKNAMES['structure']] = self._node.outputs.__getattr__(
                    OUTPUT_LINKNAMES['structure'])
        # Continuation (and reuse-style restarts) need the parent remote folder.
        if restart_mode == 'continuation' or kwargs.get('reuse'):
            builder[INPUT_LINKNAMES[
                'parent_calc_folder']] = self._node.outputs.__getattr__(
                    'remote_folder')
        return builder
def use_pseudos_from_family(builder, family_name):
    """
    Set the pseudos port namespace for a builder using pseudo family name

    :note: The structure must already be set in the builder.

    :param builder: ProcessBuilder instance to be processed, it must have a structure
    :param family_name: the name of the group containing the pseudos
    :raises RuntimeError: if the builder has no structure set
    :returns: The same builder with the pseudopotential set
    """
    # FIX: dropped the unused ``from collections import defaultdict``.
    from aiida_castep.data import get_pseudos_from_structure
    structure = builder.get(INPUT_LINKNAMES['structure'], None)
    if structure is None:
        raise RuntimeError('The builder must have a StructureData')
    # get_pseudos_from_structure returns a dict {kind_name: pseudo_object};
    # assign each pseudo to the matching kind on the pseudos namespace.
    kind_pseudo_dict = get_pseudos_from_structure(structure, family_name)
    for kind, pseudo in kind_pseudo_dict.items():
        builder.pseudos.__setattr__(kind, pseudo)
    return builder
def castep_input_summary(calc):
    """
    Convenient function for getting a summary of the
    input of this calculation

    :param calc: A CalcJobNode or ProcessBuilder or a nested input dictionary
    :raises TypeError: if ``calc`` is none of the supported types
    :returns: A dictionary with the parameters, kpoints, structure, code,
        scheduler options, pseudopotentials and (if present) the parent
        calculation/folder and settings
    """
    out_info = {}

    # Check what is passed and normalise to (inp_dict, options, metadata)
    if isinstance(calc, CalcJobNode):
        inp_dict = calc.get_incoming(link_type=(LinkType.INPUT_CALC,
                                                LinkType.INPUT_WORK)).nested()
        options = calc.get_options()
        metadata = {}  # Metadata is empty when Node is passed
        is_node = True
    elif isinstance(calc, ProcessBuilder):
        # Case of builder
        inp_dict = calc._data
        metadata = calc.metadata._data
        options = calc.metadata.get('options', {})
        is_node = False
    elif isinstance(calc, dict):
        # Case of a input dictionary
        inp_dict = calc
        metadata = calc.get('metadata', {})
        options = metadata.get('options', {})
        is_node = False
    else:
        # BUGFIX: an unsupported type used to fall through and crash later
        # with a confusing NameError; fail fast with a clear message instead.
        raise TypeError(
            'calc must be a CalcJobNode, ProcessBuilder or dict, '
            'got {}'.format(type(calc)))

    def get_node(label):
        """Get node from input dictionary"""
        return inp_dict.get(INPUT_LINKNAMES[label])

    in_param = get_node('parameters')
    in_kpn = get_node('kpoints')
    in_settings = get_node('settings')
    in_structure = get_node('structure')
    in_code = inp_dict.get('code')
    in_remote = get_node('parent_calc_folder')
    pseudos = inp_dict.get('pseudos')
    param_dict = in_param.get_dict()
    out_info.update(param_dict)
    out_info["kpoints"] = in_kpn.get_description()
    out_info["structure"] = {
        "formula": in_structure.get_formula(),
        "cell": in_structure.cell,
        "label": in_structure.label
    }
    out_info["code"] = in_code
    out_info["computer"] = calc.computer if is_node else in_code.computer
    out_info["resources"] = options.get('resources')
    out_info["custom_scheduler_commands"] = options.get(
        'custom_scheduler_commands')
    out_info["qos"] = options.get('qos')
    out_info["account"] = options.get('account')
    out_info["wallclock"] = options.get('max_wallclock_seconds')
    out_info["label"] = calc.label if is_node else metadata.get('label')
    out_info["description"] = calc.description if is_node else metadata.get(
        'description')
    # Show the parent calculation whose RemoteData is linked to the node
    if in_remote is not None:
        input_calc = [
            n.node for n in in_remote.get_incoming(link_type=LinkType.CREATE)
        ]
        assert len(
            input_calc
        ) < 2, "More than one JobCalculation found, something seriously wrong"
        if input_calc:
            input_calc = input_calc[0]
            out_info["parent_calc"] = {
                "pk": input_calc.pk,
                "label": input_calc.label
            }
        out_info["parent_calc_folder"] = in_remote
    if in_settings is not None:
        out_info["settings"] = in_settings.get_dict()
    out_info["pseudos"] = pseudos
    return out_info
def update_parameters(inputs, force=False, delete=None, **kwargs):
    """
    Convenient function to update the parameters of the calculation.
    Will automatically set the PARAM or CELL field in unstored
    Dict nodes linked to the calculation.
    If no ``Dict`` is linked to the calculation, a new node will be
    created.
    ..note:
    This method relies on the help information to check and assign
    keywords to PARAM or CELL field of the Dict
    (i.e for generating .param and .cell file)
    calc.update_parameters(task="singlepoint")
    :param force: flag to force the update even if the Dict node is stored.
    :param delete: A list of the keywords to be deleted.
    :returns: ``inputs`` with the updated parameters node/dict
    :raises RuntimeError: if the linked Dict is stored and ``force`` is False
    """
    param_node = inputs.get(INPUT_LINKNAMES['parameters'])
    # Create the node if none is found
    if param_node is None:
        warnings.warn("No existing Dict node found, creating a new one.")
        param_node = Dict(dict={"CELL": {}, "PARAM": {}})
        inputs[INPUT_LINKNAMES['parameters']] = param_node
    if isinstance(param_node, Dict) and param_node.is_stored:
        if force:
            # Create a new node if the existing node is stored
            param_node = Dict(dict=param_node.get_dict())
            inputs[INPUT_LINKNAMES['parameters']] = param_node
        else:
            raise RuntimeError("The input Dict<{}> is already stored".format(
                param_node.pk))
    # If the `node` is just a plain dict, we keep it that way
    if isinstance(param_node, Dict):
        param_dict = param_node.get_dict()
        py_dict = False
    else:
        param_dict = param_node
        py_dict = True
    # Update the dictionary: the helper sorts the flat keywords into their
    # PARAM/CELL sections and reports unknown ones.
    from .helper import HelperCheckError, CastepHelper
    helper = CastepHelper()
    dict_update, not_found = helper._from_flat_dict(kwargs)
    if not_found:
        # Build a single message listing every invalid key with a suggestion.
        suggest = [helper.get_suggestion(i) for i in not_found]
        error_string = "Following keys are invalid -- "
        for error_key, sug in zip(not_found, suggest):
            error_string += "{}: {}; ".format(error_key, sug)
        raise HelperCheckError(error_string)
    else:
        param_dict["PARAM"].update(dict_update["PARAM"])
        param_dict["CELL"].update(dict_update["CELL"])
    # Delete any keys as requested; warn when a key is in neither section.
    if delete:
        for key in delete:
            tmp1 = param_dict["PARAM"].pop(key, None)
            tmp2 = param_dict["CELL"].pop(key, None)
            if (tmp1 is None) and (tmp2 is None):
                warnings.warn("Key '{}' not found".format(key))
    # Apply the change to the node
    if py_dict:
        inputs[INPUT_LINKNAMES['parameters']] = param_dict
    else:
        param_node.set_dict(param_dict)
    return inputs
def create_restart(inputs,
                   entry_point='castep.castep',
                   calcjob=None,
                   param_update=None,
                   param_delete=None,
                   restart_mode='restart',
                   use_castep_bin=False,
                   parent_folder=None,
                   reuse=False):
    """
    Function to create a restart for a calculation.
    :param inputs: A builder or nested dictionary
    :param entry_point: Name of the entry points
    :param param_update: Update the parameters
    :param param_delete: A list of parameters to be deleted
    :param restart_mode: Mode of the restart, 'continuation' or 'restart'
    :param use_castep_bin: Use hte 'castep_bin' file instead of check
    :param parent_folder: Remote folder to be used for restart
    :param reuse: Use the reuse mode
    :returns: a fresh ProcessBuilder with the restart settings applied

    NOTE(review): ``calcjob`` is accepted but never used in this function --
    confirm whether it is kept only for API compatibility.
    """
    from aiida.plugins import CalculationFactory
    from aiida.engine import ProcessBuilder
    # Create the builder, in any case
    if isinstance(inputs, dict):
        processclass = CalculationFactory(entry_point)
        builder = processclass.get_builder()
    elif isinstance(inputs, ProcessBuilder):
        builder = inputs._process_class.get_builder()
        builder._update(inputs)
    # NOTE(review): in the dict branch the values of ``inputs`` are not
    # copied into the new builder, and any other type leaves ``builder``
    # unbound (NameError below) -- confirm this is intended.
    # Update list
    update = {}
    delete = []
    # Set the restart tag
    suffix = '.check' if not use_castep_bin else '.castep_bin'
    if restart_mode == 'continuation':
        # Continuation and reuse are mutually exclusive keywords.
        update['continuation'] = 'parent/' + builder.metadata.seedname + suffix
        delete.append('reuse')
    elif restart_mode == 'restart' and reuse:
        update['reuse'] = 'parent/' + builder.metadata.seedname + suffix
        delete.append('continuation')
    elif restart_mode is None:
        delete.extend(['continuation', 'reuse'])
    elif restart_mode != 'restart':
        raise RuntimeError('Unknown restart mode: ' + restart_mode)
    if param_update:
        update.update(param_update)
    if param_delete:
        delete.extend(param_delete)
    new_builder = update_parameters(builder,
                                    force=True,
                                    delete=delete,
                                    **update)
    # Set the parent folder
    if parent_folder is not None:
        new_builder[INPUT_LINKNAMES['parent_calc_folder']] = parent_folder
    return new_builder
def validate_input_param(input_dict, allow_flat=False):
    """
    Validate a CASTEP input parameter dictionary.

    :param input_dict: A Dict instance or python dict instance
    :param allow_flat: accept the flat (un-sectioned) keyword format
    """
    from .helper import CastepHelper
    plain_dict = input_dict.get_dict() if isinstance(input_dict, Dict) else input_dict
    CastepHelper().check_dict(plain_dict, auto_fix=False, allow_flat=allow_flat)
def input_param_validator(input_dict, port=None):
    """
    Validator used for input ports; returns the error message on failure
    and None when the parameters are valid.
    """
    from .helper import HelperCheckError
    try:
        validate_input_param(input_dict)
    except HelperCheckError as exc:
        return exc.args[0]
    return None
def flat_input_param_validator(input_dict, port=None):
    """
    Validator for input ports that also accepts the flat parameter format;
    returns the error message on failure and None when valid.
    """
    from .helper import HelperCheckError
    try:
        validate_input_param(input_dict, allow_flat=True)
    except HelperCheckError as exc:
        return exc.args[0]
    return None
def check_restart(builder, verbose=False):
    """
    Check that the restart file referenced by the builder is satisfied
    by the attached parent RemoteData folder.

    :param builder: builder (or nested input mapping) to check
    :param verbose: print progress information when True
    :returns: True if OK
    :raises: InputValidationError if error is found
    """
    import os
    from .utils import _lowercase_dict

    def _print(inp):
        if verbose:
            print(inp)

    # reuse/continuation files are declared in the PARAM section.
    paramdict = builder[INPUT_LINKNAMES['parameters']].get_dict()['PARAM']
    paramdict = _lowercase_dict(paramdict, "paramdict")
    stemp = paramdict.get("reuse", None)
    if not stemp:
        stemp = paramdict.get("continuation", None)

    if stemp is not None:
        fname = os.path.split(stemp)[-1]
        _print("This calculation requires a restart file: '{}'".format(fname))
    else:
        # No restart file needed
        _print("This calculation does not require a restart file.")
        return True

    # Now check if the remote folder has this file
    remote_data = builder.get(INPUT_LINKNAMES["parent_calc_folder"])
    if not remote_data:
        # BUGFIX: the old message called str.format without a placeholder,
        # silently dropping the required file name from the error.
        raise InputValidationError(
            "Restart file '{}' requires parent_calc_folder "
            "to be specified".format(fname))
    _print("Checking remote directory")
    folder_list = remote_data.listdir()
    if fname not in folder_list:
        raise InputValidationError(
            "Restart file {}"
            " is not in the remote folder".format(fname))
    _print("Check finished, restart file '{}' exists.".format(fname))
    return True
| 34.568627 | 85 | 0.64308 | 2,090 | 0.148185 | 0 | 0 | 0 | 0 | 0 | 0 | 4,637 | 0.328772 |
4718a2b90ba5afaa7f04662ac1cca9de7cdf47a0 | 1,013 | py | Python | apps/addons/tests/test_search.py | muffinresearch/addons-server | 66613e9262a5e9475254091552de28a53b5b4072 | [
"BSD-3-Clause"
] | 1 | 2015-12-01T03:53:51.000Z | 2015-12-01T03:53:51.000Z | apps/addons/tests/test_search.py | magopian/olympia | 70cad15111a89e3d5c715cbade8925b12d1b98dc | [
"BSD-3-Clause"
] | 5 | 2021-02-02T23:09:35.000Z | 2021-09-08T02:47:20.000Z | apps/addons/tests/test_search.py | magopian/olympia | 70cad15111a89e3d5c715cbade8925b12d1b98dc | [
"BSD-3-Clause"
] | null | null | null | from nose.tools import eq_
import amo.tests
from addons.models import (Addon, attach_categories, attach_tags,
attach_translations)
from addons.search import extract
class TestExtract(amo.tests.TestCase):
    """Tests for extract(), which builds the search-index document of an Addon."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestExtract, self).setUp()
        # Plain attributes extract() must copy verbatim from the addon.
        self.attrs = ('id', 'slug', 'created', 'last_updated',
                      'weekly_downloads', 'average_daily_users', 'status',
                      'type', 'hotness', 'is_disabled', 'premium_type')
        self.transforms = (attach_categories, attach_tags, attach_translations)
    def _extract(self):
        # Load addon 3615 with every transform applied, then run extract().
        qs = Addon.objects.filter(id__in=[3615])
        for t in self.transforms:
            qs = qs.transform(t)
        self.addon = list(qs)[0]
        return extract(self.addon)
    def test_extract_attributes(self):
        extracted = self._extract()
        for attr in self.attrs:
            eq_(extracted[attr], getattr(self.addon, attr))
| 33.766667 | 79 | 0.622902 | 817 | 0.806515 | 0 | 0 | 0 | 0 | 0 | 0 | 151 | 0.149062 |
4718c3e6f88080fb3ddbf1b2d1feee70ac12c913 | 1,669 | py | Python | src/beanmachine/applications/hme/interface.py | horizon-blue/beanmachine-1 | b13e4e3e28ffb860947eb8046863b0cabb581222 | [
"MIT"
] | 177 | 2021-12-12T14:19:05.000Z | 2022-03-24T05:48:10.000Z | src/beanmachine/applications/hme/interface.py | horizon-blue/beanmachine-1 | b13e4e3e28ffb860947eb8046863b0cabb581222 | [
"MIT"
] | 171 | 2021-12-11T06:12:05.000Z | 2022-03-31T20:26:29.000Z | src/beanmachine/applications/hme/interface.py | horizon-blue/beanmachine-1 | b13e4e3e28ffb860947eb8046863b0cabb581222 | [
"MIT"
] | 31 | 2021-12-11T06:27:19.000Z | 2022-03-25T13:31:56.000Z | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Tuple
import pandas as pd
from .configs import InferConfig, ModelConfig
from .null_mixture_model import NullMixtureMixedEffectModel
class HME:
"""The Hierarchical Mixed Effect model interface.
:param data: observed train data
:param model_config: HME model configuration parameters
"""
def __init__(self, data: pd.DataFrame, model_config: ModelConfig) -> None:
self.model = NullMixtureMixedEffectModel(data, model_config)
self.posterior_samples = None
self.posterior_diagnostics = None
def infer(self, infer_config: InferConfig) -> Tuple[pd.DataFrame]:
"""Performs MCMC posterior inference on HME model parameters and
returns MCMC samples for those parameters registered in the query.
:param infer_config: configuration settings of posterior inference
:return: posterior samples and their diagnostic summary statistics
"""
self.posterior_samples, self.posterior_diagnostics = self.model.infer(
infer_config
)
return self.posterior_samples, self.posterior_diagnostics
def predict(self, new_data: pd.DataFrame) -> pd.DataFrame:
"""Computes predictive distributions on the new test data according to
MCMC posterior samples.
:param new_data: test data for prediction
:return: predictive distributions on the new test data
"""
return self.model.predict(new_data, self.posterior_samples)
| 34.770833 | 78 | 0.720791 | 1,333 | 0.798682 | 0 | 0 | 0 | 0 | 0 | 0 | 860 | 0.515279 |
471c0022039369de016a67c94f58c195fe745dc1 | 7,390 | py | Python | python-examples/bulk/bulk_send_complex.py | ryan-lydz/socketlabs-python | 1c6497205991e946d1630f83f452289476539833 | [
"MIT"
] | 10 | 2018-11-15T20:52:52.000Z | 2021-05-17T22:06:26.000Z | python-examples/bulk/bulk_send_complex.py | ryan-lydz/socketlabs-python | 1c6497205991e946d1630f83f452289476539833 | [
"MIT"
] | 1 | 2021-08-24T17:47:06.000Z | 2022-02-06T06:58:26.000Z | python-examples/bulk/bulk_send_complex.py | ryan-lydz/socketlabs-python | 1c6497205991e946d1630f83f452289476539833 | [
"MIT"
] | 4 | 2020-07-29T18:04:01.000Z | 2021-07-30T19:42:23.000Z | import json
import os
from socketlabs.injectionapi import SocketLabsClient
from socketlabs.injectionapi.message.__imports__ import \
Attachment, BulkMessage, BulkRecipient, CustomHeader, EmailAddress
# build the message
message = BulkMessage()
# NOTE(review): message_id/mailing_id look like free-form tags for this
# send -- confirm their semantics against the SocketLabs API docs.
message.message_id = "ComplexExample"
message.mailing_id = "BulkSend"
message.charset = "UTF-8"
message.subject = "Sending A Complex Bulk Test Message"
message.html_body = "<html>" \
" <head><title>Sending A Complex Bulk Test Message</title></head>" \
" <body>" \
" <h1>Sending A Complex Test Message</h1>" \
" <h2>Merge Data</h2>" \
" <p>" \
" Motto = <b>%%Motto%%</b> </br>" \
" Birthday = <b>%%Birthday%%</b> </br>" \
" Age = <b>%%Age%%</b> </br>" \
" UpSell = <b>%%UpSell%%</b>" \
" </p>" \
" <h2>Example of Merge Usage</h2>" \
" <p>" \
" Our company motto is '<b>%%Motto%%</b>'. </br>" \
" Your birthday is <b>%%Birthday%%</b> and you are <b>%%Age%%</b> years old." \
" </p>" \
" <h2>UTF-8 Characters:</h2>" \
" <p>✔ - Check</p>" \
" <h2>Embedded Image:</h2>" \
" <p><img src='cid:bus' /></p>" \
" </body>" \
"</html>"
message.plain_text_body = "Sending A Complex Bulk Test Message" \
" Merged Data" \
" Motto = %%Motto%%" \
" Birthday = %%Birthday%%" \
" Age = %%Age%%" \
" UpSell = %%UpSell%%" \
" " \
" Example of Merge Usage" \
" Our company motto is '%%Motto%%'." \
" Your birthday is %%Birthday%% and you are %%Age%% years old."
message.amp_body = "<!doctype html>"\
"<html amp4email>" \
"<head>"\
"<title>Sending an AMP Test Message</title>"\
" <meta charset=\"utf-8\">"\
" <script async src=\"https://cdn.ampproject.org/v0.js\"></script>"\
" <style amp4email-boilerplate>body{visibility:hidden}</style>"\
" <style amp-custom>"\
" h1 {"\
" margin: 1rem;"\
" }"\
" </style>"\
"</head>"\
"<body>"\
" <h1>Sending An AMP Complex Test Message</h1>"\
" <h2>Merge Data</h2>"\
" <p>"\
" Motto = <b>%%Motto%%</b> </br>"\
" Birthday = <b>%%Birthday%%</b> </br>"\
" Age = <b>%%Age%%</b> </br>"\
" UpSell = <b>%%UpSell%%</b>"\
" </p>"\
" <h2>Example of Merge Usage</h2>"\
" <p>"\
" Our company motto is '<b>%%Motto%%</b>'. </br>"\
" Your birthday is <b>%%Birthday%%</b> and you are <b>%%Age%%</b> years old."\
" </p>"\
" <h2>UTF-8 Characters:</h2>"\
" <p>✔ - Check</p>"\
" </body>"\
" </html>"
message.from_email_address = EmailAddress("from@example.com", "FromMe")
message.reply_to_email_address = EmailAddress("replyto@example.com")
# Add some global merge-data
# (These will be applied to all Recipients unless specifically overridden by Recipient level merge data)
# ==========================
# Add global merge data using a dictionary
global_merge_data = {
    "Motto": "When hitting the inbox matters!",
    "Birthday": "unknown"
}
message.global_merge_data = global_merge_data
# Add global merge data directly to the dictionary on the message
message.global_merge_data["Age"] = "an unknown number of"
# Add global merge data using the add_global_merge_data function
message.add_global_merge_data("UpSell", "BTW: You are eligible for discount pricing when you upgrade your service!")
# Add recipients with merge data
# Including merge data on the recipient with the same name as the global merge data will override global merge data
# ==========================
# Add recipients with merge data using a dictionary
rec1_merge_data = {
    "Birthday": "08/05/1991",
    "Age": "27"
}
message.to_recipient.append(BulkRecipient("recipient1@example.com", merge_data=rec1_merge_data))
# Add recipients merge data directly to the dictionary
recipient2 = BulkRecipient("recipient2@example.com", "Recipient #2")
recipient2.merge_data["Birthday"] = "04/12/1984"
recipient2.merge_data["Age"] = "34"
# An empty UpSell value overrides (suppresses) the global up-sell text.
recipient2.merge_data["UpSell"] = ""
message.add_to_recipient(recipient2)
# Add recipients merge data using the add_merge_data function
recipient3 = BulkRecipient("recipient3@example.com")
recipient3.add_merge_data("Birthday", "10/30/1978")
recipient3.add_merge_data("Age", "40")
recipient3.add_merge_data("UpSell", "")
recipient3.friendly_name = "Recipient 3"
message.add_to_recipient(recipient3)
# recipient4 supplies no merge data, so only the global values apply.
message.add_to_recipient(BulkRecipient("recipient4@example.com", "Recipient #4"))
# Adding Attachments
# ==========================
# Add Attachment directly to the list
attachments = [
    Attachment(name="bus.png", mime_type="image/png", file_path="../img/bus.png")
]
message.attachments = attachments
# Add Attachment using the add_attachment function
attachment2 = Attachment(name="bus2", mime_type="image/png", file_path="../img/bus.png")
# The content_id makes this attachment addressable as cid:bus in the HTML body.
attachment2.content_id = "bus"
message.add_attachment(attachment2)
# Add Attachment using a filePath
message.add_attachment(Attachment(file_path="../html/SimpleEmail.html"))
# Add Attachment using bytes of the file
with open("../img/bus.png", 'rb') as f:
    data = f.read()
    # FIX: removed the redundant f.close(); the with-statement closes the file.
attachment4 = Attachment(name="yellow-bus.png", mime_type="image/png", content=data)
# Add CustomHeaders to Attachment
attachment4.custom_headers.append(CustomHeader("Color", "Yellow"))
attachment4.add_custom_header("Place", "Beach")
message.add_attachment(attachment4)
# Adding Custom Headers
# ==========================
# Add CustomHeader using a list
headers = [
    CustomHeader("example-type", "bulk-send-complex-example"),
    CustomHeader("message-contains", "attachments, headers")
]
message.custom_headers = headers
# Add CustomHeader directly to the list
message.custom_headers.append(CustomHeader("message-has-attachments", "true"))
# Add CustomHeader using the add_custom_header function
message.add_custom_header("testMessageHeader", "I am a message header")
# get credentials from environment variables
# NOTE(review): int() raises an unhelpful TypeError when
# SOCKETLABS_SERVER_ID is unset (os.environ.get returns None);
# consider failing with a clearer message.
server_id = int(os.environ.get('SOCKETLABS_SERVER_ID'))
api_key = os.environ.get('SOCKETLABS_INJECTION_API_KEY')
# create the client
client = SocketLabsClient(server_id, api_key)
# send the message and print the API response as JSON
response = client.send(message)
print(json.dumps(response.to_json(), indent=2))
| 39.518717 | 117 | 0.547903 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,088 | 0.552881 |
471cbbb20475ef2a9c2237f251f9c85a33676981 | 127 | py | Python | input_tests.py | KittyKuttleFish/Python | 3bb0309d55ac63982706653b00591fc286c6deeb | [
"Unlicense"
] | null | null | null | input_tests.py | KittyKuttleFish/Python | 3bb0309d55ac63982706653b00591fc286c6deeb | [
"Unlicense"
] | null | null | null | input_tests.py | KittyKuttleFish/Python | 3bb0309d55ac63982706653b00591fc286c6deeb | [
"Unlicense"
] | null | null | null | test = input("Please enter any string for input testing:")
print("Testing String...")
import time
time.sleep(5)
if(test)
| 21.166667 | 59 | 0.692913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 63 | 0.496063 |
471ea69b41d7dcaee6304d49c046c2751ce16a2b | 8,504 | py | Python | model.py | hafezgh/music_classification | 68fa398b7d4455475d07ae17c3b6b94459a96ac7 | [
"MIT"
] | 1 | 2021-07-15T18:47:02.000Z | 2021-07-15T18:47:02.000Z | model.py | hafezgh/music_classification | 68fa398b7d4455475d07ae17c3b6b94459a96ac7 | [
"MIT"
] | null | null | null | model.py | hafezgh/music_classification | 68fa398b7d4455475d07ae17c3b6b94459a96ac7 | [
"MIT"
] | null | null | null |
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import torchvision
import torch
from torchvision import models, datasets
class CRNN_Base(nn.Module):
def __init__(self, class_num, c, h, w, k, filters, poolings, dropout_rate, gru_dropout=0.3, gru_units=32):
super(CRNN_Base, self).__init__()
input_shape = (c, h, w)
# CNN
self.bn0 = nn.BatchNorm2d(num_features=c)
self.pad1 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
self.conv1 = nn.Conv2d(c, filters[0], kernel_size=k, stride=1)
self.act1 = nn.ELU()
self.bn1 = nn.BatchNorm2d(num_features=filters[0])
self.maxPool1 = nn.MaxPool2d(kernel_size=poolings[0], stride=poolings[0])
self.drouput1 = nn.Dropout2d(dropout_rate)
self.pad2 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
self.conv2 = nn.Conv2d(filters[0], filters[1], kernel_size=k)
self.act2 = nn.ELU()
self.bn2 = nn.BatchNorm2d(num_features=filters[1])
self.maxPool2 = nn.MaxPool2d(kernel_size=poolings[1], stride=poolings[1])
self.drouput2 = nn.Dropout2d(dropout_rate)
self.pad3 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
self.conv3 = nn.Conv2d(filters[1], filters[2], kernel_size=k)
self.act3 = nn.ELU()
self.bn3 = nn.BatchNorm2d(num_features=filters[2])
self.maxPool3 = nn.MaxPool2d(kernel_size=poolings[2], stride=poolings[2])
self.drouput3 = nn.Dropout2d(dropout_rate)
self.pad4 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
self.conv4 = nn.Conv2d(filters[2], filters[3], kernel_size=k)
self.act4 = nn.ELU()
self.bn4 = nn.BatchNorm2d(num_features=filters[3])
self.maxPool4 = nn.MaxPool2d(kernel_size=poolings[3],stride=poolings[3])
self.drouput4 = nn.Dropout2d(dropout_rate)
# Output is (m, chan, freq, time) -> Needs to be reshaped for feeding to GRU units
# We will handle the reshape in the forward method
# RNN
self.gru = nn.GRU(input_size=256, hidden_size=32, batch_first=True, num_layers=2, dropout=gru_dropout)
#self.gru2 = nn.GRU(input_size=32, hidden_size=32, batch_first=True, dropout=gru_dropout)
# Dense and softmax
self.dense1 = nn.Linear(32, class_num)
self.softm = nn.Softmax(dim=-1)
def forward(self, x):
# CNN forward
x = self.bn0(x)
x = self.pad1(x)
x = self.conv1(x)
x = self.act1(x)
x = self.bn1(x)
x = self.maxPool1(x)
x = self.drouput1(x)
x = self.pad2(x)
x = self.conv2(x)
x = self.act2(x)
x = self.bn2(x)
x = self.maxPool2(x)
x = self.drouput2(x)
x = self.pad3(x)
x = self.conv3(x)
x = self.act3(x)
x = self.bn3(x)
x = self.maxPool3(x)
x = self.drouput3(x)
x = self.pad4(x)
x = self.conv4(x)
x = self.act4(x)
x = self.bn4(x)
x = self.maxPool4(x)
x = self.drouput4(x)
# Reshape
x = x.permute(0,3,2,1)
x = torch.reshape(x, (int(x.shape[0]), int(x.shape[1]), int(x.shape[2]*x.shape[3])))
# RNN forward
x = self.gru(x)[1][0]
# Dense and softmax forward
x = self.dense1(x)
x = self.softm(x)
return x
class CRNN_Larger(nn.Module):
    """Five-conv-block variant of :class:`CRNN_Base`.

    Identical structure but with an extra convolution block; the feature
    map is read as a time sequence by a two-layer GRU whose final hidden
    state is classified by a dense softmax layer.

    NOTE(review): the GRU input size is hard-coded to 1024, so
    filters[4] * (h after the five poolings) must equal 1024.

    :param class_num: number of output classes
    :param c, h, w: input channels, height (frequency) and width (time)
    :param k: square convolution kernel size (odd values keep spatial size)
    :param filters: output-channel counts of the five conv layers
    :param poolings: (h, w) kernel/stride sizes of the five max-pool layers
    :param dropout_rate: dropout applied after every conv block
    :param gru_dropout: dropout between the stacked GRU layers
    :param gru_units: GRU hidden size; previously accepted but ignored, now
        honoured (the default of 32 reproduces the old behaviour)
    """

    def __init__(self, class_num, c, h, w, k, filters, poolings, dropout_rate, gru_dropout=0.3, gru_units=32):
        super(CRNN_Larger, self).__init__()
        # CNN (attribute names kept for checkpoint compatibility, including
        # the original 'drouput' spelling); removed the unused input_shape.
        self.bn0 = nn.BatchNorm2d(num_features=c)

        self.pad1 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
        self.conv1 = nn.Conv2d(c, filters[0], kernel_size=k, stride=1)
        self.act1 = nn.ELU()
        self.bn1 = nn.BatchNorm2d(num_features=filters[0])
        self.maxPool1 = nn.MaxPool2d(kernel_size=poolings[0], stride=poolings[0])
        self.drouput1 = nn.Dropout2d(dropout_rate)

        self.pad2 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
        self.conv2 = nn.Conv2d(filters[0], filters[1], kernel_size=k)
        self.act2 = nn.ELU()
        self.bn2 = nn.BatchNorm2d(num_features=filters[1])
        self.maxPool2 = nn.MaxPool2d(kernel_size=poolings[1], stride=poolings[1])
        self.drouput2 = nn.Dropout2d(dropout_rate)

        self.pad3 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
        self.conv3 = nn.Conv2d(filters[1], filters[2], kernel_size=k)
        self.act3 = nn.ELU()
        self.bn3 = nn.BatchNorm2d(num_features=filters[2])
        self.maxPool3 = nn.MaxPool2d(kernel_size=poolings[2], stride=poolings[2])
        self.drouput3 = nn.Dropout2d(dropout_rate)

        self.pad4 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
        self.conv4 = nn.Conv2d(filters[2], filters[3], kernel_size=k)
        self.act4 = nn.ELU()
        self.bn4 = nn.BatchNorm2d(num_features=filters[3])
        self.maxPool4 = nn.MaxPool2d(kernel_size=poolings[3], stride=poolings[3])
        self.drouput4 = nn.Dropout2d(dropout_rate)

        self.pad5 = nn.ZeroPad2d((int(k/2), int(k/2), int(k/2), int(k/2)))
        self.conv5 = nn.Conv2d(filters[3], filters[4], kernel_size=k)
        self.act5 = nn.ELU()
        self.bn5 = nn.BatchNorm2d(num_features=filters[4])
        self.maxPool5 = nn.MaxPool2d(kernel_size=poolings[4], stride=poolings[4])
        self.drouput5 = nn.Dropout2d(dropout_rate)

        # RNN -- FIX: honour gru_units instead of a hard-coded hidden size.
        self.gru = nn.GRU(input_size=1024, hidden_size=gru_units, batch_first=True, num_layers=2, dropout=gru_dropout)

        # Dense and softmax
        self.dense1 = nn.Linear(gru_units, class_num)
        self.softm = nn.Softmax(dim=-1)

    def forward(self, x):
        # CNN forward
        x = self.bn0(x)
        x = self.drouput1(self.maxPool1(self.bn1(self.act1(self.conv1(self.pad1(x))))))
        x = self.drouput2(self.maxPool2(self.bn2(self.act2(self.conv2(self.pad2(x))))))
        x = self.drouput3(self.maxPool3(self.bn3(self.act3(self.conv3(self.pad3(x))))))
        x = self.drouput4(self.maxPool4(self.bn4(self.act4(self.conv4(self.pad4(x))))))
        x = self.drouput5(self.maxPool5(self.bn5(self.act5(self.conv5(self.pad5(x))))))
        # Reshape (batch, chan, freq, time) -> (batch, time, freq * chan)
        x = x.permute(0, 3, 2, 1)
        x = torch.reshape(x, (int(x.shape[0]), int(x.shape[1]), int(x.shape[2] * x.shape[3])))
        # RNN forward -- BUGFIX: take the final hidden state of the LAST
        # stacked GRU layer (h_n[-1]); the original indexed h_n[0], which
        # is the first layer's state.
        x = self.gru(x)[1][-1]
        # Dense and softmax
        x = self.dense1(x)
        return self.softm(x)
class CRNN_ResNet18(nn.Module):
    """CRNN variant with a pre-trained ResNet-18 backbone.

    NOTE(review): ``pretrained=True`` downloads weights on first use
    (network access required).
    NOTE(review): ``gru_units`` (and c/h/w/k/filters/poolings) are accepted
    but unused -- the GRU hidden size is hard-coded to 32; confirm whether
    these should be honoured as in the sibling classes.
    """
    def __init__(self, class_num, c, h, w, k, filters, poolings, dropout_rate, gru_dropout=0.3, gru_units=32):
        # Backbone
        super(CRNN_ResNet18, self).__init__()
        input_shape = (c, h, w)  # NOTE(review): unused
        self.backbone = torchvision.models.resnet18(pretrained=True)
        # Drop the final fully-connected layer, keeping everything up to avgpool.
        modules = list(self.backbone.children())[:-1]
        self.backbone = nn.Sequential(*modules)
        ct = 0
        # Freeze the first six children of the backbone (early stages);
        # the remaining stages stay trainable for fine-tuning.
        for child in self.backbone.children():
            ct += 1
            if ct < 7:
                for param in child.parameters():
                    param.requires_grad = False
        # RNN
        self.gru = nn.GRU(input_size=512, hidden_size=32, batch_first=True, num_layers=3, dropout=gru_dropout)
        #self.gru2 = nn.GRU(input_size=32, hidden_size=32, batch_first=True, dropout=gru_dropout)
        # Dense and softmax
        self.dense1 = nn.Linear(32, class_num)
        self.softm = nn.Softmax(dim=-1)
    def forward(self, x):
        # Backbone forward; after ResNet's avgpool the map is (batch, 512, 1, 1).
        x = self.backbone(x)
        # Reshape to (batch, time, features) -- here a length-1 sequence.
        x = x.permute(0,3,2,1)
        x = torch.reshape(x, (int(x.shape[0]), int(x.shape[1]), int(x.shape[2]*x.shape[3])))
        # RNN forward
        # NOTE(review): [1][0] is the final hidden state of the FIRST of the
        # three stacked GRU layers -- confirm [1][-1] (last layer) was intended.
        x = self.gru(x)[1][0]
        # Dense and softmax forward
        x = self.dense1(x)
        x = self.softm(x)
        return x
47209eebbc149e62a5f0d2dabf52ddf2204e7525 | 1,036 | py | Python | tests/utils/test_string_utils.py | PeterSulcs/mlflow | 14c48e7bb1ca6cd6a3c1b249a486cd98bd5e7051 | [
"Apache-2.0"
] | 10,351 | 2018-07-31T02:52:49.000Z | 2022-03-31T23:33:13.000Z | tests/utils/test_string_utils.py | PeterSulcs/mlflow | 14c48e7bb1ca6cd6a3c1b249a486cd98bd5e7051 | [
"Apache-2.0"
] | 3,733 | 2018-07-31T01:38:51.000Z | 2022-03-31T23:56:25.000Z | tests/utils/test_string_utils.py | PeterSulcs/mlflow | 14c48e7bb1ca6cd6a3c1b249a486cd98bd5e7051 | [
"Apache-2.0"
] | 2,596 | 2018-07-31T06:38:39.000Z | 2022-03-31T23:56:32.000Z | import pytest
from mlflow.utils.string_utils import strip_prefix, strip_suffix, is_string_type
@pytest.mark.parametrize(
    "original,prefix,expected",
    [
        ("smoketest", "smoke", "test"),
        ("", "test", ""),
        ("", "", ""),
        ("test", "", "test"),
    ],
)
def test_strip_prefix(original, prefix, expected):
    """strip_prefix removes *prefix* from the start of *original*."""
    result = strip_prefix(original, prefix)
    assert result == expected
@pytest.mark.parametrize(
    "original,suffix,expected",
    [
        ("smoketest", "test", "smoke"),
        ("", "test", ""),
        ("", "", ""),
        ("test", "", "test"),
    ],
)
def test_strip_suffix(original, suffix, expected):
    """strip_suffix removes *suffix* from the end of *original*."""
    result = strip_suffix(original, suffix)
    assert result == expected
def test_is_string_type():
    """Only genuine str values are recognised as string type."""
    string_values = ["validstring", "", (b"dog").decode("utf-8")]
    for value in string_values:
        assert is_string_type(value)
    non_string_values = [None, ["teststring"], [], {}, {"test": "string"}, 12, 12.7]
    for value in non_string_values:
        assert not is_string_type(value)
| 31.393939 | 91 | 0.666023 | 0 | 0 | 0 | 0 | 512 | 0.494208 | 0 | 0 | 214 | 0.206564 |
472102e62d417732e12272e8b4c6fc0b9a1fb145 | 352 | py | Python | api/migrations/0002_auto_20181108_2243.py | apigram/jade-api | 1aece29c3109db68897fdf854be431554e7f2863 | [
"Apache-2.0"
] | null | null | null | api/migrations/0002_auto_20181108_2243.py | apigram/jade-api | 1aece29c3109db68897fdf854be431554e7f2863 | [
"Apache-2.0"
] | null | null | null | api/migrations/0002_auto_20181108_2243.py | apigram/jade-api | 1aece29c3109db68897fdf854be431554e7f2863 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.1.3 on 2018-11-08 11:43
from django.db import migrations
class Migration(migrations.Migration):
    # Renames the Company model's `contact` field to `contacts`;
    # applied after the app's initial migration.
    dependencies = [
        ('api', '0001_initial'),
    ]
    operations = [
        migrations.RenameField(
            model_name='company',
            old_name='contact',
            new_name='contacts',
        ),
    ]
| 18.526316 | 47 | 0.571023 | 267 | 0.758523 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.267045 |
47213f9e5edf66ce4e4334e6890171cc22696a9b | 4,523 | py | Python | argueserver/plugins/arguments.py | UKPLab/argotario | cb817460e2c1cae1d2edb86eeb3ab238c592de4b | [
"Apache-2.0"
] | 8 | 2017-12-13T18:41:51.000Z | 2021-12-14T20:56:33.000Z | argueserver/plugins/arguments.py | UKPLab/argotario | cb817460e2c1cae1d2edb86eeb3ab238c592de4b | [
"Apache-2.0"
] | 1 | 2019-12-18T03:02:27.000Z | 2019-12-23T19:06:07.000Z | argueserver/plugins/arguments.py | UKPLab/argotario | cb817460e2c1cae1d2edb86eeb3ab238c592de4b | [
"Apache-2.0"
] | 2 | 2017-08-14T13:24:16.000Z | 2021-02-16T08:52:22.000Z | import interfaces
import random
import pymongo
class Arguments(interfaces.Plugin):
    """
    Plugin for the ``arguments`` MongoDB collection.

    Keys starting with ``out_`` hold ids of linked documents; keys starting
    with ``&`` hold the resolved (embedded) documents added by :meth:`kraken`.
    """
    # follows the links and resolves them
    def kraken(self, arg):
        """
        Resolve linked documents and embed them into *arg*: the topic it
        refers to and its true fallacy type. Mutates and returns *arg*.
        """
        fallacies = self.plugins['fallacies']
        # Resolve the topic link only once (skip if already embedded).
        if not '&refersTo' in arg:
            if 'out_refersTo' in arg:
                topic = self.plugins['topics'].get_by_arg({'arg' : arg});
                arg['&refersTo'] = [topic];
        # Same for the fallacy type.
        if not '&fallacyType' in arg:
            if 'out_fallacyType' in arg:
                fallacyType = fallacies.get_fallacy_for_arg(arg)
                arg['&fallacyType'] = fallacyType
        return arg;
    def get_by_topic(self, t_id):
        """
        Return a cursor over all arguments referring to topic *t_id*.
        """
        return self.arguments.find({'out_refersTo' : t_id})
    def get_voted(self, field):
        """
        Return a cursor over all arguments that have a voting on *field*.
        """
        return self.arguments.find({'votings.' + field : {'$exists' : True}})
    def input_filter(self, doc):
        """
        Validate an argument document before insertion; return False to
        reject. Requires the listed fields to be present and truthy, at
        least one component with non-empty 'type' and 'body', and an
        existing referenced topic.
        """
        required_fields = ['out_refersTo', 'fallacyId', 'components', 'stance']
        for field in required_fields:
            if field not in doc:
                return False
            if not doc[field]:
                return False
        components = doc['components']
        if len(components) < 1:
            return False
        for comp in components:
            if not ('type' in comp and 'body' in comp):
                return False
            if not (comp['type'] and comp['body']):
                return False
        # kraken=False: only existence matters here, no need to resolve links.
        topic = self.plugins['topics'].get_by_id({'id' : doc['out_refersTo']}, kraken=False);
        if topic is None:
            return False
        return True
    #PUBLIC ENDPOINT
    def fallacy_recognition(self, p):
        """
        Endpoint for the fallacy-recognition round: pick a random fallacious
        argument matching the optional 'difficulty' / 'context' / 'language'
        params *p*, embed its linked documents (or its session for
        context-bound arguments) plus the candidate fallacy list, and
        return it (None if no argument matches).
        """
        fallacies = None
        args = None
        difficulty = None  # NOTE(review): assigned but never used — confirm dead
        if 'language' in p:
            language = p['language']
        else:
            language = 'en'
        #get fallacious arguments of type with <= difficulty
        #and fallacies with matching difficulty
        if 'difficulty' in p:
            _filter = {'operator': '<=', 'difficulty': p['difficulty']}
            if 'context' in p and p['context'] is False:
                _filter['context'] = None
            fallacies = self.plugins['fallacies'].get_by_difficulty(_filter);
            args = self.arguments.find({"out_language": language, "out_fallacyType" : {"$in" : fallacies.distinct('_id')}})
        #get any argument which is supposed to be fallacious
        #and all fallacies, because the args can be of any type
        else:
            _filter = {"out_language": language, "fallacyId" : {"$ne" : None}}
            if 'context' in p and p['context'] is False:
                _filter['context'] = None
                fallacies = self.plugins['fallacies'].fallacies.find({'context' : None});
            else:
                fallacies = self.plugins['fallacies'].get_all();
            args = self.arguments.find(_filter)
        # NOTE(review): Cursor.count() is deprecated/removed in newer pymongo
        # versions — confirm the pinned pymongo release supports it.
        nr_args = args.count()
        if nr_args == 0:
            return None
        arg = args[random.randrange(0, nr_args)]
        if 'context' in arg and arg['context'] is True:
            # Context-bound argument: embed its session plus the preceding
            # arguments of that session (those before this one in order).
            sessionP = self.plugins['sessions']
            session = sessionP.get_by_arg(arg['_id'])
            session['&args'] = []
            session['&allFallacies'] = []
            for a_id in session['out_argument'][:session['out_argument'].index(arg['_id'])]:
                session['&args'].append(self.arguments.find_one({'_id' : a_id}))
            session = sessionP.kraken(session)
            arg['in_session'] = session
        else:
            self.kraken(arg)
        arg['&allFallacies'] = list(fallacies)
        return arg;
def getHandler(database):
    """Factory hook: build the Arguments plugin bound to *database*."""
    exposed = ['fallacy_recognition', 'insert']
    plugin = Arguments(database, 'arguments', public_endpoint_extensions=exposed)
    return plugin
| 34.265152 | 124 | 0.569755 | 4,267 | 0.9434 | 0 | 0 | 0 | 0 | 0 | 0 | 1,655 | 0.365908 |
47228a6a12c88d7a3cfa15a2f732624f6eded042 | 2,245 | py | Python | predictions/dota_predictor.py | LavinaVRovine/hazard | e0408374dc0b76f8b9a0107f5f12cca2d4c033ef | [
"MIT"
] | 1 | 2020-10-05T14:19:35.000Z | 2020-10-05T14:19:35.000Z | predictions/dota_predictor.py | LavinaVRovine/hazard | e0408374dc0b76f8b9a0107f5f12cca2d4c033ef | [
"MIT"
] | null | null | null | predictions/dota_predictor.py | LavinaVRovine/hazard | e0408374dc0b76f8b9a0107f5f12cca2d4c033ef | [
"MIT"
] | null | null | null | import pandas as pd
from sqlalchemy import create_engine
from config import DATABASE_URI
from predictions.common_predictor import CommonPredictor
# Widen pandas console output for debugging prints.
pd.set_option("display.width", 1000)
pd.set_option("display.max_columns", 50)


class DotaPredictor(CommonPredictor):
    """Predictor specialised for Dota match statistics.

    The feature columns come in mirrored pairs: every per-team statistic
    (e.g. ``kills``) has an opposing-team counterpart prefixed ``c_``.
    """

    def __init__(self, debug: bool = False):
        super().__init__(debug=debug)
        base_stats = [
            "kills",
            "deaths",
            "assists",
            "worth",
            "last_hits",
            "denies",
            "gold_min",
            "xp_min",
            "dmg_heroes",
            "healing",
            "dmg_buildings",
            "total_win_pct",
        ]
        # Own-team columns followed by the same stats for the opposing team.
        self.training_columns = base_stats + ["c_" + name for name in base_stats]
        # Target column (historically 't1_winner').
        self.y_col_name = "win"
if __name__ == "__main__":
    # Load all match stats from the dota database.
    DB_URL = f"{DATABASE_URI}dota"
    ENGINE = create_engine(DB_URL)
    df = pd.read_sql_table("match_stats_all", con=ENGINE)
    # Compute the historical win rate between each pair of teams.
    totals = df.groupby(["t1_id", "t2_id"])["t1_id"].count()
    wins = df[df["t1_winner"] == True].groupby(["t1_id", "t2_id"])["t1_id"].count()
    win_pcts = wins.divide(totals).reset_index(name="winrate").fillna(0)
    # Binary target: team 1 wins at least half of the mutual games.
    win_pcts["win"] = win_pcts["winrate"] >= 0.5
    df = df.drop("t1_winner", axis=1).drop_duplicates()
    # Join key "t1-t2" to merge the pairwise win rates back onto the rows.
    df["joinon"] = df[["t1_id", "t2_id"]].astype(str).apply("-".join, 1)
    win_pcts["joinon"] = win_pcts[["t1_id", "t2_id"]].astype(str).apply("-".join, 1)
    df = pd.merge(df, win_pcts, on="joinon")
    # Drop the merge-suffixed id columns and helper columns.
    df.drop(
        ["t1_id_x", "t2_id_x", "t1_id_y", "t2_id_y", "joinon", "winrate"],
        axis=1,
        inplace=True,
    )
    # df.drop_duplicates()
    # y = df.pop("t1_winner")
    df.fillna(0, inplace=True)
    import mlflow
    from config import ROOT_DIR
    # Track training runs locally under <project>/mlruns.
    mlflow.set_tracking_uri(f"file:///{ROOT_DIR}/mlruns")
    mlflow.set_experiment("hazard_dota")
    pred = DotaPredictor(debug=False)
    pred.main_train(df, run_name="save run", n_runs=50)
    print()
4722d1369c95c041c606bcb2874aaeb9b35e80f8 | 5,646 | py | Python | sphinxcontrib/gitloginfo/__init__.py | TYPO3-Documentation/sphinxcontrib-gitloginfo | 5f0ea2e3c69cde680f7431843e41ce598088f04c | [
"MIT"
] | null | null | null | sphinxcontrib/gitloginfo/__init__.py | TYPO3-Documentation/sphinxcontrib-gitloginfo | 5f0ea2e3c69cde680f7431843e41ce598088f04c | [
"MIT"
] | 1 | 2021-11-28T10:28:16.000Z | 2021-11-29T08:59:43.000Z | sphinxcontrib/gitloginfo/__init__.py | TYPO3-Documentation/sphinxcontrib-gitloginfo | 5f0ea2e3c69cde680f7431843e41ce598088f04c | [
"MIT"
] | null | null | null | """
sphinxcontrib.gitloginfo
~~~~~~~~~~~~~~~~~~~~~~~~
Provide properties obtained from git log
:copyright: Copyright 2020 by Martin Bless <martin.bless@mbless.de>
:license: MIT, see LICENSE for details.
"""
import datetime
import io
import json
import sys
from os.path import exists as ospe, join as ospj
from sphinx.util import i18n, logging
from sphinxcontrib.gitloginfo.version import __version__
log = logging.getLogger(__name__)
PY2 = sys.version_info[0] == 2
# Module-level working data shared by the event handlers below.
wd = workdata = {}
if PY2:
    # Python 2 has no datetime.timezone.utc — provide a minimal UTC tzinfo.
    class UtcTzinfo(datetime.tzinfo):
        """UTC"""
        ZERO = datetime.timedelta(0)
        def utcoffset(self, dt):
            return self.ZERO
        def tzname(self, dt):
            return "UTC"
        def dst(self, dt):
            return self.ZERO
    utc_tzinfo = UtcTzinfo()
else:
    utc_tzinfo = datetime.timezone.utc
def _html_page_context(app, pagename, templatename, context, doctree):
    """Sphinx 'html-page-context' handler: inject git log info into ``t3ctx``.

    Looks up the page's source file in the preloaded gitloginfo data and, if
    found, adds last-modified date, commit hash and (when derivable) a commit
    URL to the template context.
    """
    # Only real source-backed pages carry these context keys.
    if 'sourcename' in context and 'page_source_suffix' in context:
        pass
    else:
        return
    # Path of the page's source file relative to the git repository root.
    pagename_as_repofile = ospj(wd['project_offset'],
                                wd['t3docdir'],
                                pagename + context['page_source_suffix'])
    # filedata maps repo-relative path -> (unix timestamp, commit hash).
    v = wd['filedata'].get(pagename_as_repofile, (None, None))
    timestamp = v[0]
    commit_hash = v[1]
    if timestamp is None:
        log.info("[%s] %s :: not found" % (__name__, pagename_as_repofile))
        return
    else:
        log.info("[%s] %s :: found" % (__name__, pagename_as_repofile))
    last_modified_dt = datetime.datetime.fromtimestamp(timestamp, utc_tzinfo)
    last_modified = i18n.format_date(wd['html_last_updated_fmt'],
                                     date=last_modified_dt,
                                     language=app.config.language)
    # Try to assemble or guess a commit url
    commit_url_template = context.get('theme_project_commit_url')
    if not commit_url_template:
        # Maybe we can deduce the url needed
        repo_url = (context.get('theme_project_repository') or
                    context.get('theme_project_issues'))
        if repo_url:
            # Github
            if repo_url.startswith('https://github.com/'):
                if repo_url.endswith('/issues'):
                    repo_url = repo_url[:-7]
                elif repo_url.endswith('.git'):
                    repo_url = repo_url[:-4]
                commit_url_template = repo_url + '/commit/%(commit_hash)s'
            # git.typo3.org
            elif repo_url.startswith('https://git.typo3.org/'):
                if repo_url.endswith('/issues'):
                    repo_url = repo_url[:-7]
                commit_url_template = repo_url + '/commit/%(commit_hash)s'
            # bitbucket
            elif repo_url.startswith('https://bitbucket.org/'):
                if repo_url.endswith('/src/master/'):
                    repo_url = repo_url[:-12]
                commit_url_template = repo_url + '/commits/%(commit_hash)s'
            # gitlab: checks only the host part (up to the first '/' after the
            # scheme) for the substring "gitlab".
            elif "gitlab" in repo_url[:repo_url[8:].find('/')+8]:
                if repo_url.endswith('/-/issues'):
                    repo_url = repo_url[:-9]
                commit_url_template = repo_url + '/-/commit/%(commit_hash)s'
    # Publish the results for templates under context['t3ctx'].
    t3ctx = context['t3ctx'] = context.get('t3ctx', {})
    t3ctx['commit_hash'] = commit_hash
    t3ctx['last_modified'] = last_modified
    t3ctx['last_modified_isoformat'] = last_modified_dt.isoformat()
    if commit_url_template:
        t3ctx['commit_url'] = commit_url_template % {'commit_hash': commit_hash}
def _config_inited(app, config):
    """Record the configured last-updated date format, with a default."""
    configured_fmt = getattr(app.config, 'html_last_updated_fmt', None)
    wd['html_last_updated_fmt'] = configured_fmt or '%b %d, %Y %H:%M'
def setup(app):
    """Sphinx extension entry point.

    Loads buildsettings.json and gitloginfo.json from app.confdir into the
    module-level ``wd`` dict and, if git log data is present, connects the
    page-context and config handlers.
    """
    app.require_sphinx('1.8') # For "config-inited" event
    buildsettings_jsonfile = ospj(app.confdir, 'buildsettings.json')
    if ospe(buildsettings_jsonfile):
        # just collect knowledge
        wd['buildsettings_jsonfile'] = buildsettings_jsonfile
        with io.open(buildsettings_jsonfile, 'r', encoding='utf-8') as f1:
            wd['buildsettings'] = json.load(f1)
        log.info("[%s] app.confdir/buildsettings.json :: found" % (__name__,))
    else:
        log.info("[%s] app.confdir/buildsettings.json :: not found" % (__name__,))
    gitloginfo_jsonfile = ospj(app.confdir, 'gitloginfo.json')
    if ospe(gitloginfo_jsonfile):
        # just collect knowledge
        wd['gitloginfo_jsonfile'] = gitloginfo_jsonfile
        with io.open(gitloginfo_jsonfile, 'r', encoding='utf-8') as f1:
            wd['gitloginfo'] = json.load(f1)
        log.info("[%s] app.confdir/gitloginfo.json :: found" % (__name__,))
        wd['filedata'] = wd['gitloginfo'].get('filedata', {})
        # Project directory relative to the repository root (stripped of '/').
        wd['project_offset'] = (wd['gitloginfo']['abspath_to_project']
                                [len(wd['gitloginfo']['abspath_to_repo']):]
                                .strip('/'))
        wd['t3docdir'] = wd.get('buildsettings', {}).get('t3docdir',
                                                         'Documentation')
    else:
        log.info("[%s] app.confdir/gitloginfo.json :: not found" % (__name__,))
    if wd.get('filedata'):
        # only connect if there is something to do
        app.connect('html-page-context', _html_page_context)
        app.connect('config-inited', _config_inited)
        log.info("[%s] filedata found" % (__name__,))
    else:
        log.info("[%s] filedata not found" % (__name__,))
    return {
        'parallel_read_safe': True,
        'parallel_write_safe': True,
        'version': __version__,
    }
| 37.390728 | 82 | 0.589798 | 265 | 0.046936 | 0 | 0 | 0 | 0 | 0 | 0 | 1,745 | 0.309068 |
4722f796a55af95014f5b5b2803786e73346c8bc | 22,314 | py | Python | train/train.py | sjtu-tcloud/Tiny-OFA | 4b0c3228d96e0a0a16b6a73d8c65afddea7bad49 | [
"MIT"
] | null | null | null | train/train.py | sjtu-tcloud/Tiny-OFA | 4b0c3228d96e0a0a16b6a73d8c65afddea7bad49 | [
"MIT"
] | null | null | null | train/train.py | sjtu-tcloud/Tiny-OFA | 4b0c3228d96e0a0a16b6a73d8c65afddea7bad49 | [
"MIT"
] | null | null | null | import argparse
import torch.distributed as dist
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
import test # import test.py to get mAP after each epoch
from models import *
from utils.dataset import *
from utils.utils import *
from mymodel import *
wdir = 'weights' + os.sep  # weights dir
last = wdir + 'last.pt'
best = wdir + 'best.pt'
test_best = wdir + 'test_best.pt'
results_file = 'results.txt'
# Hyperparameters (results68: 59.9 mAP@0.5 yolov3-spp-416) https://github.com/ultralytics/yolov3/issues/310
hyp = {'giou': 3.54,  # giou loss gain
       'cls': 37.4,  # cls loss gain
       'cls_pw': 1.0,  # cls BCELoss positive_weight
       'obj': 64.3,  # obj loss gain (*=img_size/320 if img_size != 320)
       'obj_pw': 1.0,  # obj BCELoss positive_weight
       'iou_t': 0.225,  # iou training threshold
       'lr0': 0.01,  # initial learning rate (SGD=5E-3, Adam=5E-4)
       'lrf': -4.,  # final LambdaLR learning rate = lr0 * (10 ** lrf)
       'momentum': 0.937,  # SGD momentum
       'weight_decay': 0.000484,  # optimizer weight decay
       'fl_gamma': 0.5,  # focal loss gamma
       'hsv_h': 0.0138,  # image HSV-Hue augmentation (fraction)
       'hsv_s': 0.678,  # image HSV-Saturation augmentation (fraction)
       'hsv_v': 0.36,  # image HSV-Value augmentation (fraction)
       'degrees': 1.98,  # image rotation (+/- deg)
       'translate': 0.05,  # image translation (+/- fraction)
       'scale': 0.05,  # image scale (+/- gain)
       'shear': 0.641}  # image shear (+/- deg)
# Overwrite hyp with hyp*.txt (optional)
# NOTE: the zip below relies on the insertion order of hyp's keys matching
# the column order of the hyp*.txt file.
f = glob.glob('hyp*.txt')
if f:
    print('Using %s' % f[0])
    for k, v in zip(hyp.keys(), np.loadtxt(f[0])):
        hyp[k] = v
def train():
    """Run the full training loop configured by the module-level ``opt``/``hyp``.

    Loads data from the hard-coded ../data paths, builds the model, trains
    with optional multi-scale augmentation, evaluates after every epoch and
    writes checkpoints (last/best/test_best) plus results.txt.  Returns the
    final evaluation results tuple.

    NOTE(review): ``train_path``/``test_path`` and ``nc = 1`` are hard-coded
    below, so the --data/--cfg options are partly ignored — confirm intended.
    """
    cfg = opt.cfg
    data = opt.data
    img_size, img_size_test = opt.img_size if len(opt.img_size) == 2 else opt.img_size * 2  # train, test sizes
    epochs = opt.epochs  # 500200 batches at bs 64, 117263 images = 273 epochs
    batch_size = opt.batch_size
    accumulate = opt.accumulate  # effective bs = batch_size * accumulate = 16 * 4 = 64
    weights = opt.weights  # initial training weights

    # Initialize
    init_seeds()
    if opt.multi_scale:
        img_sz_min = round(img_size / 32 / 1.5)
        img_sz_max = round(img_size / 32* 1.5)
        img_size = img_sz_max * 32  # initiate with maximum multi_scale size
        print('Using multi-scale %g - %g' % (img_sz_min * 32, img_size))

    # Configure run
    # data_dict = parse_data_cfg(data)
    train_path = '../data/data_training'
    test_path = '../data/data_test'
    nc = 1

    # Remove previous results
    for f in glob.glob('*_batch*.png') + glob.glob(results_file):
        os.remove(f)

    # Initialize model
    # model = Darknet(cfg, arc=opt.arc).to(device)
    # model = UltraNetFloat640().to(device)
    # model = TempNet().to(device)
    # model = TempNetDW().to(device)
    # model = TempNetQua().to(device)
    # model = SqueezeNetQua().to(device)
    # model = UltraNet().to(device)
    model = TinyUltraNet().to(device)

    # Optimizer: three parameter groups (biases, conv weights w/ decay, rest)
    pg0, pg1, pg2 = [], [], []  # optimizer parameter groups
    for k, v in dict(model.named_parameters()).items():
        if '.bias' in k:
            pg2 += [v]  # biases
        elif 'Conv2d.weight' in k:
            pg1 += [v]  # apply weight_decay
        else:
            pg0 += [v]  # all else
    if opt.adam:
        # hyp['lr0'] *= 0.1  # reduce lr (i.e. SGD=5E-3, Adam=5E-4)
        optimizer = optim.Adam(pg0, lr=hyp['lr0'])
        # optimizer = AdaBound(pg0, lr=hyp['lr0'], final_lr=0.1)
    else:
        optimizer = optim.SGD(pg0, lr=hyp['lr0'], momentum=hyp['momentum'], nesterov=True)
    optimizer.add_param_group({'params': pg1, 'weight_decay': hyp['weight_decay']})  # add pg1 with weight_decay
    optimizer.add_param_group({'params': pg2})  # add pg2 (biases)
    optimizer.param_groups[2]['lr'] *= 2.0  # bias lr
    del pg0, pg1, pg2

    start_epoch = 0
    best_fitness = 0.0
    test_best_iou = 0.0
    # attempt_download(weights)
    # Load initial weights (PyTorch checkpoint or darknet format)
    if weights.endswith('.pt'):  # pytorch format
        # possible weights are '*.pt', 'yolov3-spp.pt', 'yolov3-tiny.pt' etc.
        chkpt = torch.load(weights, map_location=device)

        # load model: keep only tensors whose shapes match the current model
        try:
            chkpt['model'] = {k: v for k, v in chkpt['model'].items() if model.state_dict()[k].numel() == v.numel()}
            model.load_state_dict(chkpt['model'], strict=False)
        except KeyError as e:
            s = "%s is not compatible with %s. Specify --weights '' or specify a --cfg compatible with %s. " % (opt.weights, opt.cfg, opt.weights)
            raise KeyError(s) from e

        # load optimizer
        if chkpt['optimizer'] is not None:
            optimizer.load_state_dict(chkpt['optimizer'])
            best_fitness = chkpt['best_fitness']

        # load results
        if chkpt.get('training_results') is not None:
            with open(results_file, 'w') as file:
                file.write(chkpt['training_results'])  # write results.txt

        start_epoch = chkpt['epoch'] + 1
        del chkpt

    elif len(weights) > 0:  # darknet format
        # possible weights are '*.weights', 'yolov3-tiny.conv.15', 'darknet53.conv.74' etc.
        load_darknet_weights(model, weights)

    # Scheduler https://github.com/ultralytics/yolov3/issues/238
    # lf = lambda x: 1 - x / epochs  # linear ramp to zero
    # lf = lambda x: 10 ** (hyp['lrf'] * x / epochs)  # exp ramp
    # lf = lambda x: 1 - 10 ** (hyp['lrf'] * (1 - x / epochs))  # inverse exp ramp
    lf = lambda x: (1 + math.cos(x * math.pi / epochs)) / 2 * 0.99 + 0.01  # cosine https://arxiv.org/pdf/1812.01187.pdf
    scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
    # scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[round(epochs * x) for x in [0.8, 0.9]], gamma=0.1)
    scheduler.last_epoch = start_epoch

    # # Plot lr schedule
    # y = []
    # for _ in range(epochs):
    #     scheduler.step()
    #     y.append(optimizer.param_groups[0]['lr'])
    # plt.plot(y, '.-', label='LambdaLR')
    # plt.xlabel('epoch')
    # plt.ylabel('LR')
    # plt.tight_layout()
    # plt.savefig('LR.png', dpi=300)

    # Initialize distributed training
    if device.type != 'cpu' and torch.cuda.device_count() > 1 and torch.distributed.is_available():
        dist.init_process_group(backend='nccl',  # 'distributed backend'
                                init_method='tcp://127.0.0.1:9999',  # distributed training init method
                                world_size=1,  # number of nodes for distributed training
                                rank=0)  # distributed training node rank
        model = torch.nn.parallel.DistributedDataParallel(model, find_unused_parameters=True)
        model.yolo_layers = model.module.yolo_layers  # move yolo layer indices to top level

    # Dataset
    dataset = LoadImagesAndLabels(train_path, img_size, batch_size,
                                  augment=True,
                                  hyp=hyp,  # augmentation hyperparameters
                                  rect=opt.rect,  # rectangular training
                                  cache_images=opt.cache_images,
                                  single_cls=opt.single_cls)

    # Dataloader
    batch_size = min(batch_size, len(dataset))
    nw = min([os.cpu_count(), batch_size if batch_size > 1 else 0, 8])  # number of workers
    dataloader = torch.utils.data.DataLoader(dataset,
                                             batch_size=batch_size,
                                             num_workers=nw,
                                             shuffle=not opt.rect,  # Shuffle=True unless rectangular training is used
                                             pin_memory=True,
                                             collate_fn=dataset.collate_fn)

    # Testloader
    testloader = torch.utils.data.DataLoader(LoadImagesAndLabels(test_path, img_size_test, batch_size * 2,
                                                                 hyp=hyp,
                                                                 rect=False,
                                                                 cache_images=opt.cache_images,
                                                                 single_cls=opt.single_cls),
                                             batch_size=batch_size * 2,
                                             num_workers=nw,
                                             pin_memory=True,
                                             collate_fn=dataset.collate_fn)

    # Start training
    nb = len(dataloader)
    prebias = start_epoch == 0
    model.nc = nc  # attach number of classes to model
    model.arc = opt.arc  # attach yolo architecture
    model.hyp = hyp  # attach hyperparameters to model
    model.class_weights = labels_to_class_weights(dataset.labels, nc).to(device)  # attach class weights
    maps = np.zeros(nc)  # mAP per class
    # torch.autograd.set_detect_anomaly(True)
    results = (0, 0, 0, 0, 0, 0, 0)  # 'P', 'R', 'mAP', 'F1', 'val GIoU', 'val Objectness', 'val Classification'
    t0 = time.time()
    torch_utils.model_info(model, report='summary')  # 'full' or 'summary'
    print('Using %g dataloader workers' % nw)
    print('Starting training for %g epochs...' % epochs)
    for epoch in range(start_epoch, epochs):  # epoch ------------------------------------------------------------------
        model.train()
        model.gr = 1 - (1 + math.cos(min(epoch * 2, epochs) * math.pi / epochs)) / 2  # GIoU <-> 1.0 loss ratio

        # Prebias: warm up only the bias group during the first few epochs
        if prebias:
            ne = max(round(30 / nb), 3)  # number of prebias epochs
            ps = np.interp(epoch, [0, ne], [0.1, hyp['lr0'] * 2]), \
                 np.interp(epoch, [0, ne], [0.9, hyp['momentum']])  # prebias settings (lr=0.1, momentum=0.9)
            if epoch == ne:
                # print_model_biases(model)
                prebias = False

            # Bias optimizer settings
            optimizer.param_groups[2]['lr'] = ps[0]
            if optimizer.param_groups[2].get('momentum') is not None:  # for SGD but not Adam
                optimizer.param_groups[2]['momentum'] = ps[1]

        mloss = torch.zeros(4).to(device)  # mean losses
        print(('\n' + '%10s' * 8) % ('Epoch', 'gpu_mem', 'GIoU', 'obj', 'cls', 'total', 'targets', 'img_size'))
        pbar = tqdm(enumerate(dataloader), total=nb)  # progress bar
        for i, (imgs, targets, paths, _) in pbar:  # batch -------------------------------------------------------------
            ni = i + nb * epoch  # number integrated batches (since train start)
            imgs = imgs.to(device).float() / 255.0  # uint8 to float32, 0 - 255 to 0.0 - 1.0
            targets = targets.to(device)

            # Plot images with bounding boxes
            if ni < 1:
                f = 'train_batch%g.png' % i  # filename
                plot_images(imgs=imgs, targets=targets, paths=paths, fname=f)
                if tb_writer:
                    tb_writer.add_image(f, cv2.imread(f)[:, :, ::-1], dataformats='HWC')

            # Multi-Scale training
            if opt.multi_scale:
                if ni / accumulate % 1 == 0:  # adjust img_size (67% - 150%) every 1 batch
                    img_size = random.randrange(img_sz_min, img_sz_max + 1) * 32
                sf = img_size / max(imgs.shape[2:])  # scale factor
                if sf != 1:
                    ns = [math.ceil(x * sf / 32.) * 32 for x in imgs.shape[2:]]  # new shape (stretched to 16-multiple)
                    imgs = F.interpolate(imgs, size=ns, mode='bilinear', align_corners=False)

            # Run model
            pred = model(imgs)

            # Compute loss
            loss, loss_items = compute_loss(pred, targets, model)
            if not torch.isfinite(loss):
                print('WARNING: non-finite loss, ending training ', loss_items)
                return results

            # Scale loss by nominal batch_size of 64
            loss *= batch_size / 64
            loss.backward()

            # Optimize accumulated gradient
            if ni % accumulate == 0:
                optimizer.step()
                optimizer.zero_grad()

            # Print batch results
            mloss = (mloss * i + loss_items) / (i + 1)  # update mean losses
            # NOTE(review): torch.cuda.memory_cached() is deprecated in newer
            # PyTorch in favor of memory_reserved() — confirm pinned version.
            mem = '%.3gG' % (torch.cuda.memory_cached() / 1E9 if torch.cuda.is_available() else 0)  # (GB)
            s = ('%10s' * 2 + '%10.3g' * 6) % ('%g/%g' % (epoch, epochs - 1), mem, *mloss, len(targets), img_size)
            pbar.set_description(s)

            # end batch ------------------------------------------------------------------------------------------------

        # Update scheduler
        scheduler.step()

        # Process epoch results
        final_epoch = epoch + 1 == epochs
        if not opt.notest or final_epoch:  # Calculate mAP
            is_coco = any([x in data for x in ['coco.data', 'coco2014.data', 'coco2017.data']]) and model.nc == 80
            results = test.test(cfg,
                                data,
                                batch_size=batch_size * 2,
                                img_size=img_size_test,
                                model=model,
                                conf_thres=0.001,  # 0.001 if opt.evolve or (final_epoch and is_coco) else 0.01,
                                iou_thres=0.6,
                                save_json=final_epoch and is_coco,
                                single_cls=opt.single_cls,
                                dataloader=testloader)

        # Write epoch results
        with open(results_file, 'a') as f:
            f.write(s + '%10.3g' * len(results) % results + '\n')  # P, R, mAP, F1, test_losses=(GIoU, obj, cls)
        if len(opt.name) and opt.bucket:
            os.system('gsutil cp results.txt gs://%s/results/results%s.txt' % (opt.bucket, opt.name))

        # Write Tensorboard results
        if tb_writer:
            x = list(mloss) + list(results)
            titles = ['GIoU', 'Objectness', 'Classification', 'Train loss',
                      'iou', 'loss', 'Giou loss', 'obj loss']
            for xi, title in zip(x, titles):
                tb_writer.add_scalar(title, xi, epoch)

        # Update best mAP
        fi = fitness(np.array(results).reshape(1, -1))  # fitness_i = weighted combination of [P, R, mAP, F1]
        if fi > best_fitness:
            best_fitness = fi
        test_iou = results[0]
        if test_iou > test_best_iou:
            test_best_iou = test_iou
        # Save training results
        save = (not opt.nosave) or (final_epoch and not opt.evolve)
        if save:
            with open(results_file, 'r') as f:
                # Create checkpoint
                chkpt = {'epoch': epoch,
                         'best_fitness': best_fitness,
                         'training_results': f.read(),
                         'model': model.module.state_dict() if type(
                             model) is nn.parallel.DistributedDataParallel else model.state_dict(),
                         'optimizer': None if final_epoch else optimizer.state_dict()}

            # Save last checkpoint
            torch.save(chkpt, last)

            # Save best checkpoint
            if best_fitness == fi:
                torch.save(chkpt, best)
            if test_iou == test_best_iou:
                torch.save(chkpt, test_best)

            # Save backup every 10 epochs (optional)
            # if epoch > 0 and epoch % 10 == 0:
            #     torch.save(chkpt, wdir + 'backup%g.pt' % epoch)

            # Delete checkpoint
            del chkpt

        # end epoch ----------------------------------------------------------------------------------------------------

    # end training
    n = opt.name
    if len(n):
        n = '_' + n if not n.isnumeric() else n
        fresults, flast, fbest = 'results%s.txt' % n, 'last%s.pt' % n, 'best%s.pt' % n
        os.rename('results.txt', fresults)
        os.rename(wdir + 'last.pt', wdir + flast) if os.path.exists(wdir + 'last.pt') else None
        os.rename(wdir + 'best.pt', wdir + fbest) if os.path.exists(wdir + 'best.pt') else None
        if opt.bucket:  # save to cloud
            os.system('gsutil cp %s gs://%s/results' % (fresults, opt.bucket))
            os.system('gsutil cp %s gs://%s/weights' % (wdir + flast, opt.bucket))
            # os.system('gsutil cp %s gs://%s/weights' % (wdir + fbest, opt.bucket))

    if not opt.evolve:
        plot_results()  # save as results.png
    print('%g epochs completed in %.3f hours.\n' % (epoch - start_epoch + 1, (time.time() - t0) / 3600))
    dist.destroy_process_group() if torch.cuda.device_count() > 1 else None
    torch.cuda.empty_cache()

    return results
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--epochs', type=int, default=500)  # 500200 batches at bs 16, 117263 COCO images = 273 epochs
    parser.add_argument('--batch-size', type=int, default=16)  # effective bs = batch_size * accumulate = 16 * 4 = 64
    parser.add_argument('--accumulate', type=int, default=4, help='batches to accumulate before optimizing')
    parser.add_argument('--cfg', type=str, default='cfg/yolov3-tiny-1cls_1.cfg', help='*.cfg path')
    parser.add_argument('--data', type=str, default='data/coco2017.data', help='*.data path')
    parser.add_argument('--multi-scale', action='store_true', help='adjust (67% - 150%) img_size every 10 batches')
    parser.add_argument('--img-size', nargs='+', type=int, default=[320], help='train and test image-sizes')
    parser.add_argument('--rect', action='store_true', help='rectangular training')
    parser.add_argument('--resume', action='store_true', help='resume training from last.pt')
    parser.add_argument('--nosave', action='store_true', help='only save final checkpoint')
    parser.add_argument('--notest', action='store_true', help='only test final epoch')
    parser.add_argument('--evolve', action='store_true', help='evolve hyperparameters')
    parser.add_argument('--bucket', type=str, default='', help='gsutil bucket')
    parser.add_argument('--cache-images', action='store_true', help='cache images for faster training')
    parser.add_argument('--weights', type=str, default='', help='initial weights path')
    parser.add_argument('--arc', type=str, default='default', help='yolo architecture')  # default, uCE, uBCE
    parser.add_argument('--name', default='', help='renames results.txt to results_name.txt if supplied')
    parser.add_argument('--device', default='', help='device id (i.e. 0 or 0,1 or cpu)')
    parser.add_argument('--adam', action='store_true', help='use adam optimizer')
    parser.add_argument('--single-cls', action='store_true', help='train as single-class dataset')
    parser.add_argument('--var', type=float, help='debug variable')
    opt = parser.parse_args()
    opt.weights = last if opt.resume else opt.weights
    print(opt)
    device = torch_utils.select_device(opt.device, batch_size=opt.batch_size)

    # scale hyp['obj'] by img_size (evolved at 320)
    # hyp['obj'] *= opt.img_size[0] / 320.

    tb_writer = None
    if not opt.evolve:  # Train normally
        try:
            # Start Tensorboard with "tensorboard --logdir=runs", view at http://localhost:6006/
            from torch.utils.tensorboard import SummaryWriter
            tb_writer = SummaryWriter()
        except:
            pass

        train()  # train normally

    else:  # Evolve hyperparameters (optional)
        opt.notest, opt.nosave = True, True  # only test/save final epoch
        if opt.bucket:
            os.system('gsutil cp gs://%s/evolve.txt .' % opt.bucket)  # download evolve.txt if exists

        for _ in range(1):  # generations to evolve
            if os.path.exists('evolve.txt'):  # if evolve.txt exists: select best hyps and mutate
                # Select parent(s)
                parent = 'single'  # parent selection method: 'single' or 'weighted'
                x = np.loadtxt('evolve.txt', ndmin=2)
                n = min(5, len(x))  # number of previous results to consider
                x = x[np.argsort(-fitness(x))][:n]  # top n mutations
                w = fitness(x) - fitness(x).min()  # weights
                if parent == 'single' or len(x) == 1:
                    # x = x[random.randint(0, n - 1)]  # random selection
                    x = x[random.choices(range(n), weights=w)[0]]  # weighted selection
                elif parent == 'weighted':
                    x = (x * w.reshape(n, 1)).sum(0) / w.sum()  # weighted combination

                # Mutate
                method, mp, s = 3, 0.9, 0.2  # method, mutation probability, sigma
                npr = np.random
                npr.seed(int(time.time()))
                g = np.array([1, 1, 1, 1, 1, 1, 1, 0, .1, 1, 0, 1, 1, 1, 1, 1, 1, 1])  # gains
                ng = len(g)
                if method == 1:
                    v = (npr.randn(ng) * npr.random() * g * s + 1) ** 2.0
                elif method == 2:
                    v = (npr.randn(ng) * npr.random(ng) * g * s + 1) ** 2.0
                elif method == 3:
                    v = np.ones(ng)
                    while all(v == 1):  # mutate until a change occurs (prevent duplicates)
                        # v = (g * (npr.random(ng) < mp) * npr.randn(ng) * s + 1) ** 2.0
                        v = (g * (npr.random(ng) < mp) * npr.randn(ng) * npr.random() * s + 1).clip(0.3, 3.0)
                # NOTE(review): x[i + 7] assumes the first 7 columns of
                # evolve.txt hold the results tuple — confirm file layout.
                for i, k in enumerate(hyp.keys()):  # plt.hist(v.ravel(), 300)
                    hyp[k] = x[i + 7] * v[i]  # mutate

            # Clip to limits
            keys = ['lr0', 'iou_t', 'momentum', 'weight_decay', 'hsv_s', 'hsv_v', 'translate', 'scale', 'fl_gamma']
            limits = [(1e-5, 1e-2), (0.00, 0.70), (0.60, 0.98), (0, 0.001), (0, .9), (0, .9), (0, .9), (0, .9), (0, 3)]
            for k, v in zip(keys, limits):
                hyp[k] = np.clip(hyp[k], v[0], v[1])

            # Train mutation
            results = train()

            # Write mutation results
            print_mutation(hyp, results, opt.bucket)

            # Plot results
            # plot_evolution_results(hyp)
| 47.375796 | 146 | 0.54589 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8,164 | 0.365738 |
4722f8068271144abbf5c6184025fd0b5c1933a0 | 4,422 | py | Python | client.py | BrenndonCJ/GUI_Chat_MultiClient_Python | 491105ed260ce28a3b95bb56feff43a0900cd765 | [
"MIT"
] | null | null | null | client.py | BrenndonCJ/GUI_Chat_MultiClient_Python | 491105ed260ce28a3b95bb56feff43a0900cd765 | [
"MIT"
] | null | null | null | client.py | BrenndonCJ/GUI_Chat_MultiClient_Python | 491105ed260ce28a3b95bb56feff43a0900cd765 | [
"MIT"
] | null | null | null | import sys
from socket import AF_INET, socket, SOCK_STREAM
from threading import Thread
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
from frontend.tela_login import *
from frontend.tela_chat import *
class TelaXat(QMainWindow):
    """Frameless chat window bound to an already-connected client socket."""
    def __init__(self, cs, username):
        super().__init__()
        self.client_socket = cs
        # Load GUI
        self.tela_chat = Ui_TelaXAT()
        self.tela_chat.setupUi(self)
        self.tela_chat.label_user.setText(username)
        self.setWindowFlags(Qt.FramelessWindowHint)
        self.setAttribute(Qt.WA_TranslucentBackground)
        # Window dragging: these labels forward mouse-move to moveWindow
        self.tela_chat.label_user.mouseMoveEvent = self.moveWindow
        self.tela_chat.cha_name.mouseMoveEvent = self.moveWindow
        # Background thread that receives server messages.
        # NOTE(review): appends to a Qt widget from a non-GUI thread — not
        # officially thread-safe; consider signals/slots.
        receive_thread = Thread(target=self.recebe)
        receive_thread.start()
        # Signal wiring
        self.tela_chat.send_message.clicked.connect(self.btSend)
        self.tela_chat.button_exit.clicked.connect(self.exit_)
        self.tela_chat.exit_button.clicked.connect(self.quit_)
    # Functions
    def btSend(self):
        # Send the typed message and clear the input box.
        msg = self.tela_chat.entry_message.toPlainText()
        self.client_socket.send(msg.encode())
        self.tela_chat.entry_message.setText('')
    def recebe(self):
        """Handle incoming messages from the server (runs on a worker thread)."""
        while True:
            try:
                msg = self.client_socket.recv(1024).decode("utf8")
                self.tela_chat.menssage_box.append(f'@{msg}')
            except OSError:  # Probably the client left the chat (socket closed).
                break
    def exit_(self):
        # Leave the chat: close the socket and go back to the login window.
        self.client_socket.close()
        self.tela_login = MainWindow()
        self.tela_login.show()
        self.hide()
    def closeEvent(self, event):
        # Ensure the socket is released when the window closes.
        self.client_socket.close()
        event.accept()
    # Frameless-window dragging
    def mousePressEvent(self, event):
        self.clickPosition = event.globalPos()
    def moveWindow(self, event):
        if event.buttons() == Qt.LeftButton:
            self.move(self.pos() + event.globalPos() - self.clickPosition)
            self.clickPosition = event.globalPos()
            event.accept()
    # Close the window
    def quit_(self):
        self.close()
class MainWindow(QMainWindow):
    """Login window: connects to the chat server in the background and
    opens a TelaXat chat window once the user logs in."""
    def __init__(self):
        super().__init__()
        # Server connection (established asynchronously by ver_conn)
        HOST = "localhost"
        PORT = 50000
        self.ADDR = (HOST, PORT)
        self.client_socket = socket(AF_INET, SOCK_STREAM)
        threadConn = Thread(target=self.ver_conn)
        threadConn.start()
        # Load GUI
        self.ui = Ui_TelaLogin()
        self.ui.setupUi(self)
        self.setWindowFlags(Qt.FramelessWindowHint)
        self.setAttribute(Qt.WA_TranslucentBackground)
        # Window dragging support for the frameless window
        self.ui.frame.mouseMoveEvent = self.moveWindow
        # Red status dot: offline until ver_conn succeeds
        self.ui.label_status.setStyleSheet(u"border-radius:5px;\n"
"background-color: rgb(255, 0, 0);")
        # Signal wiring
        self.ui.button_login.clicked.connect(self.login)
        self.ui.pushButton.clicked.connect(self.quit_)
    # Functions
    def ver_conn(self):
        """Retry connecting to the server until it succeeds, then flip the
        status indicator to green/online.

        Runs on a worker thread started by __init__.
        NOTE(review): updating Qt widgets from a non-GUI thread is not
        officially thread-safe — consider signals/slots; kept as-is here.
        """
        import time  # local import: only needed for the retry back-off
        while True:
            try:
                self.client_socket.connect(self.ADDR)
                self.ui.label_status.setStyleSheet(u"border-radius:5px;\n"
"background-color: rgb(0, 255, 0);")
                self.ui.label_on_off.setText('online')
                break
            except OSError:
                # Bug fix: the original bare ``except: pass`` swallowed every
                # exception (including programming errors) and busy-spun at
                # 100% CPU. Only connection failures should be retried, and
                # with a short pause between attempts.
                time.sleep(0.5)
    def login(self):
        """Send the chosen username to the server and open the chat window."""
        user = self.ui.username.text()
        print(user)
        self.client_socket.send(user.encode())
        self.tela_chat = TelaXat(self.client_socket, f'@{user}')
        self.tela_chat.show()
        self.hide()
    # Frameless-window dragging
    def mousePressEvent(self, event):
        self.clickPosition = event.globalPos()
    def moveWindow(self, event):
        if event.buttons() == Qt.LeftButton:
            self.move(self.pos() + event.globalPos() - self.clickPosition)
            self.clickPosition = event.globalPos()
            event.accept()
    # Close the window
    def quit_(self):
        self.close()
if __name__ == "__main__":
    # Launch several chat clients at once for local testing.
    n_clients = 3  # how many client windows to start
    app = QApplication(sys.argv)
    windows = []
    for _ in range(n_clients):
        client = MainWindow()
        client.show()
        windows.append(client)  # keep references so the windows are not garbage-collected
    sys.exit(app.exec_())
47237997c5e5520b03dbf2e14640da28cd728e21 | 554 | py | Python | server/serv.py | pdeutsch/RobotVisionSim | 166ad4a634b5488e169aa1a8a397a36c0c63a1d1 | [
"Apache-2.0"
] | null | null | null | server/serv.py | pdeutsch/RobotVisionSim | 166ad4a634b5488e169aa1a8a397a36c0c63a1d1 | [
"Apache-2.0"
] | null | null | null | server/serv.py | pdeutsch/RobotVisionSim | 166ad4a634b5488e169aa1a8a397a36c0c63a1d1 | [
"Apache-2.0"
] | null | null | null |
import time
from networktables import NetworkTables
# Kept for reference: explicit initialization / custom server settings.
# NetworkTables.initialize()
# NetworkTables.startServer(persistFilename='networktables.ini', listenAddress='localhost', port=8123)

# Start an NT server with default settings.
NetworkTables.startServer()
# Consistency fix: use an f-string like the print() below instead of .format().
print(f'isServer:{NetworkTables.isServer()}')

# Publish two demo values under the 'vision' table.
t = NetworkTables.getTable('vision')
t.putNumber('x', 123.45)
t.putNumber('y', 22.33)

# Read the values back; 99 is the fallback ('z' was never written, so it yields 99).
x = t.getEntry('x').getNumber(99)
y = t.getEntry('y').getNumber(99)
z = t.getEntry('z').getNumber(99)
print(f'x={x} and y={y} and z={z}')

# Stay up briefly so clients can connect, then shut down.
time.sleep(20)
NetworkTables.stopServer()
print('done')
| 20.518519 | 101 | 0.729242 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 198 | 0.357401 |
4723919a9d77e95a8ef3cea2313a03d4c809f601 | 725 | py | Python | diet/urls.py | EspeIgira/Health-Diet | a0314beb787e981a50946c64f6da30bd34897cc3 | [
"MIT"
] | null | null | null | diet/urls.py | EspeIgira/Health-Diet | a0314beb787e981a50946c64f6da30bd34897cc3 | [
"MIT"
] | null | null | null | diet/urls.py | EspeIgira/Health-Diet | a0314beb787e981a50946c64f6da30bd34897cc3 | [
"MIT"
] | null | null | null | from django.conf.urls import url
from . import views
from django.conf import settings
from django.conf.urls.static import static
# URL routes for the diet app (Django 1.x regex-based url()).
urlpatterns=[
    url('^$',views.diet,name = 'diet'),
    # NOTE(review): this pattern repeats '^$' above, so Django always resolves
    # '/' to the 'diet' view first and 'healthToday' can never match -- it
    # probably needs its own distinct path; confirm the intended route.
    url('^$',views.health_of_day,name='healthToday'),
    url(r'^search/', views.search_results, name='search_results'),
    url(r'^image/(\d+)',views.image,name ='image'),
    url(r'^vegetable/',views.vegetable,name ='vegetable'),
    url(r'^fruit/',views.fruit,name ='fruit'),
    url(r'^protein/',views.protein,name ='protein'),
    url(r'^cereal/',views.cereal,name ='cereal'),
    url(r'^diary/',views.diary,name ='diary'),
]

# Serve uploaded media from MEDIA_ROOT in development (DEBUG) mode only.
if settings.DEBUG:
    urlpatterns+= static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
4723dcee9820113cc4255a18cebb8871583017fd | 124 | py | Python | tests/data/samplecfg.py | glebshevchukk/gradslam | ce38c98537e1521b5e0a780f1c53f26924df7dab | [
"MIT"
] | 1,048 | 2020-11-06T03:49:27.000Z | 2022-03-28T17:57:05.000Z | tests/data/samplecfg.py | glebshevchukk/gradslam | ce38c98537e1521b5e0a780f1c53f26924df7dab | [
"MIT"
] | 32 | 2020-11-08T09:19:21.000Z | 2022-03-14T20:36:25.000Z | tests/data/samplecfg.py | glebshevchukk/gradslam | ce38c98537e1521b5e0a780f1c53f26924df7dab | [
"MIT"
] | 124 | 2020-11-08T07:29:57.000Z | 2022-03-16T11:40:56.000Z | import os
import sys
from gradslam.config import CfgNode as CN
# Minimal sample configuration tree consumed by the config-loading tests.
cfg = CN()
cfg.TRAIN = CN()
# Arbitrary example hyperparameter value.
cfg.TRAIN.HYPERPARAM_1 = 0.9
| 11.272727 | 41 | 0.717742 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4724329a1d45ef768bccff6671f8a252daabbfed | 908 | py | Python | whendidit/tests/test_happen.py | RyanSkraba/whendidit | 2b9ffd489d3b894d60a4712a53ffd94ae93a194d | [
"Apache-2.0"
] | 1 | 2020-01-31T08:19:55.000Z | 2020-01-31T08:19:55.000Z | whendidit/tests/test_happen.py | RyanSkraba/whendidit | 2b9ffd489d3b894d60a4712a53ffd94ae93a194d | [
"Apache-2.0"
] | null | null | null | whendidit/tests/test_happen.py | RyanSkraba/whendidit | 2b9ffd489d3b894d60a4712a53ffd94ae93a194d | [
"Apache-2.0"
] | null | null | null | import os
import tempfile
from unittest import TestCase
import avro
import whendidit
class TestHappen(TestCase):
    """Tests that whendidit.happen() appends events to an Avro container file."""

    @staticmethod
    def _read_events(filename):
        """Read every record back from the given Avro file (always closes the reader)."""
        reader = avro.datafile.DataFileReader(open(filename, "rb"), avro.io.DatumReader())
        try:
            return list(reader)
        finally:
            # Close even if iteration raises, so the file handle is not leaked.
            reader.close()

    def test_adds_to_file(self):
        """Successive happen() calls accumulate events in the same file."""
        with tempfile.TemporaryDirectory() as tmp_dir:
            filename = os.path.join(tmp_dir, 'events.avro')
            # Add one event to the file (return value unused -- the original
            # bound it to an unused local).
            whendidit.happen("One", 123, 456, filename)
            self.assertEqual(len(self._read_events(filename)), 1)
            # Add another event to the same file.
            whendidit.happen("Two", 234, 567, filename)
            self.assertEqual(len(self._read_events(filename)), 2)
| 30.266667 | 94 | 0.612335 | 819 | 0.901982 | 0 | 0 | 0 | 0 | 0 | 0 | 96 | 0.105727 |
472749bd2670647f70a7a59990ef445be74c2560 | 846 | py | Python | hackerrank/Python/Hex Color Code/test.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | 4 | 2020-07-24T01:59:50.000Z | 2021-07-24T15:14:08.000Z | hackerrank/Python/Hex Color Code/test.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | null | null | null | hackerrank/Python/Hex Color Code/test.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | null | null | null | import io
import unittest
from contextlib import redirect_stdout
from unittest.mock import patch
class TestQ(unittest.TestCase):
@patch('builtins.input', side_effect=[
'11',
'#BED',
'{',
' color: #FfFdF8; background-color:#aef;',
' font-size: 123px;',
'',
'}',
'#Cab',
'{',
' background-color: #ABC;',
' border: 2px dashed #fff;',
'}',
])
def test_case_0(self, input_mock=None):
text_trap = io.StringIO()
with redirect_stdout(text_trap):
import solution
self.assertEqual(text_trap.getvalue(),
'#FfFdF8\n' +
'#aef\n' +
'#ABC\n' +
'#fff\n')
if __name__ == '__main__':
unittest.main()
| 24.171429 | 53 | 0.471631 | 697 | 0.823877 | 0 | 0 | 661 | 0.781324 | 0 | 0 | 217 | 0.256501 |
472aaec1568b0bce44426eab87e5a536d04258d9 | 5,078 | py | Python | minos/tests/model/parameters_test.py | qorrect/sisy | 4c279f3a47109395d57521b5c8144b18693737fc | [
"Apache-2.0"
] | 6 | 2017-09-15T03:14:10.000Z | 2019-12-03T04:15:21.000Z | minos/tests/model/parameters_test.py | qorrect/sisy | 4c279f3a47109395d57521b5c8144b18693737fc | [
"Apache-2.0"
] | 2 | 2017-09-21T01:49:42.000Z | 2017-09-23T16:33:01.000Z | minos/tests/model/parameters_test.py | qorrect/sisy | 4c279f3a47109395d57521b5c8144b18693737fc | [
"Apache-2.0"
] | null | null | null | '''
Created on Feb 7, 2017
@author: julien
'''
import unittest
from keras.layers.core import Dense
from minos.experiment.experiment import ExperimentParameters, Experiment,\
check_experiment_parameters, InvalidParametersException
from minos.experiment.training import Training
from minos.model.parameter import random_param_value, int_param, float_param,\
string_param
from minos.model.parameters import reference_parameters,\
register_custom_activation, register_custom_layer
class ParametersTest(unittest.TestCase):
    """Unit tests for experiment/layout/layer parameter handling."""

    def test_parameters(self):
        """Every reference layer parameter must be reachable by dotted name."""
        experiment_parameters = ExperimentParameters(use_default_values=False)
        # Iterate items() directly instead of .keys() plus a second lookup.
        for layer, layer_params in reference_parameters['layers'].items():
            for name in layer_params:
                self.assertIsNotNone(
                    experiment_parameters.get_layer_parameter('%s.%s' % (layer, name)),
                    'Parameter %s should exist for layer %s' % (name, layer))

    def test_custom_parameters(self):
        """Custom layout/layer parameters must overwrite the defaults."""
        experiment_parameters = ExperimentParameters()
        experiment_parameters.layout_parameter('blocks', int_param(1, 10))
        param = experiment_parameters.get_layout_parameter('blocks')
        # Separate asserts give a precise failure message for each bound.
        self.assertEqual(param.lo, 1, 'Should have set values')
        self.assertEqual(param.hi, 10, 'Should have set values')
        experiment_parameters.layout_parameter('layers', int_param(1, 3))
        param = experiment_parameters.get_layout_parameter('layers')
        self.assertEqual(param.lo, 1, 'Should have set values')
        self.assertEqual(param.hi, 3, 'Should have set values')
        experiment_parameters.layer_parameter('Dense.activation', string_param(['relu', 'tanh']))
        param = experiment_parameters.get_layer_parameter('Dense.activation')
        self.assertEqual(param.values[0], 'relu', 'Should have set values')
        self.assertEqual(param.values[1], 'tanh', 'Should have set values')

    def test_random_value(self):
        """random_param_value must honor type, value lists, ranges and optionality."""
        param = int_param(values=list(range(10)))
        val = random_param_value(param)
        self.assertIsInstance(val, int, 'Should be an int')
        self.assertIn(val, param.values, 'Value should be in predefined values')

        param = float_param(values=[i * 0.1 for i in range(10)])
        val = random_param_value(param)
        self.assertIsInstance(val, float, 'Should be a float')
        self.assertIn(val, param.values, 'Value should be in predefined values')

        param = float_param(lo=.5, hi=.7)
        for _ in range(100):
            val = random_param_value(param)
            self.assertIsInstance(val, float, 'Should be a float')
            self.assertGreaterEqual(val, param.lo, 'Value should be in range')
            self.assertLessEqual(val, param.hi, 'Value should be in range')

        # All keys are mandatory: the generated dict must contain every one.
        param = {
            'a': float_param(optional=False),
            'b': float_param(optional=False)}
        for _ in range(10):
            val = random_param_value(param)
            self.assertIsInstance(val, dict, 'Should be a dict')
            self.assertEqual(
                len(val), len(param),
                'Should respect non optional setting')

        # All keys optional: the generated dict may hold any subset.
        param = {
            'a': float_param(optional=True),
            'b': float_param(optional=True)}
        for _ in range(10):
            val = random_param_value(param)
            self.assertIsInstance(val, dict, 'Should be a dict')
            self.assertLessEqual(
                len(val), len(param),
                'Should respect non optional setting')

    def test_search_parameters(self):
        """An experiment with an empty parameter set must be rejected."""
        experiment = Experiment(
            label='test',
            parameters=ExperimentParameters(use_default_values=False))
        # assertRaises replaces the original manual try/except flag dance.
        with self.assertRaises(InvalidParametersException):
            check_experiment_parameters(experiment)

    def test_custom_definitions(self):
        """Custom layers and activations must be registered into the search space."""

        def custom_activation(x):
            return x

        register_custom_activation('custom_activation', custom_activation)
        register_custom_layer('Dense2', Dense, dict(test='test'))
        experiment_parameters = ExperimentParameters(use_default_values=False)
        custom_params = experiment_parameters.get_layer_parameter('Dense2')
        self.assertIsNotNone(
            custom_params,
            'Should have registered custom layer')
        self.assertIn(
            'test', custom_params,
            'Should have registered custom layer params')
        activations = experiment_parameters.get_layer_parameter('Dense.activation')
        self.assertIn(
            'custom_activation', activations.values,
            'Should have registered custom_activation')
if __name__ == "__main__":
    # Run the whole suite; uncomment the line below to target a single test.
    # import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
| 37.614815 | 97 | 0.62052 | 4,485 | 0.883222 | 0 | 0 | 0 | 0 | 0 | 0 | 854 | 0.168176 |
472ab92bb444adbe723ece54af105e66dd29ae4d | 4,408 | py | Python | agent/adapters/opencog/relex/__init__.py | akolonin/singnet | 3be30d40a2394325dc14bb55ea2871fe463b9405 | [
"MIT"
] | null | null | null | agent/adapters/opencog/relex/__init__.py | akolonin/singnet | 3be30d40a2394325dc14bb55ea2871fe463b9405 | [
"MIT"
] | null | null | null | agent/adapters/opencog/relex/__init__.py | akolonin/singnet | 3be30d40a2394325dc14bb55ea2871fe463b9405 | [
"MIT"
] | 1 | 2020-10-27T01:32:15.000Z | 2020-10-27T01:32:15.000Z | #
# adapters/opencog/relex/__init__.py - an AI adapter that integrates the relex natural language parser...
#
# Copyright (c) 2017 SingularityNET
#
# Distributed under the MIT software license, see LICENSE file.
#
import logging
from typing import List
import socket
import json
import select
import asyncio
from sn_agent.job.job_descriptor import JobDescriptor
from sn_agent.service_adapter import ServiceAdapterABC
from sn_agent.ontology import Service
from sn_agent.service_adapter import ServiceAdapterABC, ServiceManager
logger = logging.getLogger(__name__)
class RelexAdapter(ServiceAdapterABC):
    """
    Service adapter that sends sentences to a relex natural-language parse
    server over TCP and collects the parsed results.
    """

    type_name = "RelexAdapter"

    def __init__(self, app, service: Service, required_services: List[Service]) -> None:
        super().__init__(app, service, required_services)
        # Initialize member variables here.
        self.response_template = None

    def post_load_initialize(self, service_manager: ServiceManager):
        super().post_load_initialize(service_manager)

    def get_attached_job_data(self, job_item: dict) -> dict:
        """
        Validate a job item and return its attached input data.

        Raises:
            RuntimeError: if the item is not of type 'attached' or carries
            no input data.
        """
        # Make sure the input type is one we can handle...
        input_type = job_item['input_type']
        if input_type != 'attached':
            logger.error("BAD input dict %s", str(job_item))
            # Fixed error text: the original message blamed "AgentSimple",
            # a copy/paste leftover from another adapter.
            raise RuntimeError("RelexAdapter - job item 'input_type' must be 'attached'.")

        # Pull the input data from the job item
        input_data = job_item['input_data']
        if input_data is None:
            raise RuntimeError("RelexAdapter - job item 'input_data' must be defined.")
        return input_data

    def relex_parse_sentence(self, sentence: str) -> dict:
        """
        Send *sentence* to the relex server and return the decoded reply.

        Wire protocol: we send "text: <sentence>\\n"; the server replies with
        a length prefix line followed by the JSON payload.  Returns the
        string "NOT RECEIVED" if nothing came back before the timeout.
        (The -> dict annotation is kept for interface compatibility, but the
        value returned is actually a str.)
        """
        # Open a TCP socket
        relex_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        time_out_seconds = 10.0
        relex_socket.settimeout(time_out_seconds)
        received_message = "NOT RECEIVED"
        try:
            # Connect to server and send data - note that "relex" below is the way to get to the
            # server running in another Docker container. See: docker_compose.yml
            relex_socket.connect(("relex", 9000))

            # Construct the message for the relex server. NOTE: It expects a "text: " at the
            # beginning and a "\n" at the end.
            relex_sentence = "text: " + sentence + "\n"

            # Send the sentence to the relex server.
            relex_socket.sendall(relex_sentence.encode('utf-8'))

            # Read the first parts
            received_message = relex_socket.recv(1024)

            # Strip off the length from the message
            if b'\n' in received_message:
                length_string, received_message = received_message.split(b'\n', 1)
                # Renamed from ``bytes``, which shadowed the builtin type.
                expected_length = int(length_string) - len(length_string)
                # Read the rest if we don't already have the full JSON reply.
                # NOTE(review): a single recv() is not guaranteed to return
                # all remaining bytes; a fully robust client would loop here
                # until expected_length bytes have arrived.
                if expected_length > 1024:
                    received_message = received_message + relex_socket.recv(expected_length)

            # Decode this since the rest of the system expects unicode strings and not the
            # bytes returned from the socket.
            received_message = received_message.decode('utf-8')
        except socket.timeout:
            print("Socket timed out")
        finally:
            relex_socket.close()
        return received_message

    def perform(self, job: JobDescriptor):
        """Run a relex parse for every item in *job* and return the list of results."""
        logger.debug("Performing Relex parse job.")

        # Process the items in the job. The job may include many different sentences.
        results = []
        for job_item in job:
            # Get the input data for this job.
            job_data = self.get_attached_job_data(job_item)

            # Check to make sure you have the data required.
            sentence = job_data.get('sentence')
            if sentence is None:
                raise RuntimeError("RelexAdapter - job item 'input_data' missing 'sentence'")

            # Send the sentence to the relex server for parsing.
            parsed_sentence = self.relex_parse_sentence(sentence)

            # Add the job results to our combined results array for all job items.
            single_job_result = {
                'relex_parse': parsed_sentence,
            }
            results.append(single_job_result)

        # Return the list of results that come from appending the results for the
        # individual job items in the job.
        return results
| 33.907692 | 105 | 0.650181 | 3,833 | 0.869555 | 0 | 0 | 0 | 0 | 0 | 0 | 1,641 | 0.372278 |
472afc21ea7eeaee85fc2d73a6095f40e07d89e4 | 22,482 | py | Python | oompa/tracking/github/GitHubNeo.py | sjtsp2008/oompa | 2dfdecba192c408c0463da27e0b5859ef9ce3db6 | [
"Apache-2.0"
] | 2 | 2016-02-23T00:58:11.000Z | 2017-06-14T15:39:22.000Z | oompa/tracking/github/GitHubNeo.py | sjtsp2008/oompa | 2dfdecba192c408c0463da27e0b5859ef9ce3db6 | [
"Apache-2.0"
] | 3 | 2015-06-21T11:13:50.000Z | 2015-06-21T13:27:03.000Z | oompa/tracking/github/GitHubNeo.py | sjtsp2008/oompa | 2dfdecba192c408c0463da27e0b5859ef9ce3db6 | [
"Apache-2.0"
] | null | null | null | #
# GitHubNeo.py
#
# note: i tried using bulbs, which would be easier to
# migrate to other tinkerpop graph engines, but had
# trouble authenticating
#
#
"""
package oompa.tracking.github
experiments with working on github graphs in neo
uses py2neo
TODO: i think bulb seems to have better object modeling (but doesn't work for me)
"""
from datetime import timedelta
from datetime import datetime
import py2neo
from oompa.tracking.github import github_utils
# Short aliases for the py2neo graph primitives used throughout this module.
Node = py2neo.Node
Relationship = py2neo.Relationship
"""
misc neo notes:
visit: http://localhost:7474/
default user - neo4j
part of neo walkthrough
CREATE (ee:Person { name: "Emil", from: "Sweden", klout: 99 })
() means "node"
{} surround attrs
Person is the label
MATCH (ee:Person) WHERE ee.name = "Emil" RETURN ee;
complex creation:
MATCH (ee:Person) WHERE ee.name = "Emil"
CREATE (js:Person { name: "Johan", from: "Sweden", learn: "surfing" }),
(ir:Person { name: "Ian", from: "England", title: "author" }),
(rvb:Person { name: "Rik", from: "Belgium", pet: "Orval" }),
(ally:Person { name: "Allison", from: "California", hobby: "surfing" }),
(ee)-[:KNOWS {since: 2001}]->(js),(ee)-[:KNOWS {rating: 5}]->(ir),
(js)-[:KNOWS]->(ir),(js)-[:KNOWS]->(rvb),
(ir)-[:KNOWS]->(js),(ir)-[:KNOWS]->(ally),
(rvb)-[:KNOWS]->(ally)
pattern matching:
MATCH (ee:Person)-[:KNOWS]-(friends)
WHERE ee.name = "Emil" RETURN ee, friends
Pattern matching can be used to make recommendations. Johan is
learning to surf, so he may want to find a new friend who already
does:
MATCH (js:Person)-[:KNOWS]-()-[:KNOWS]-(surfer)
WHERE js.name = "Johan" AND surfer.hobby = "surfing"
RETURN DISTINCT surfer
() empty parenthesis to ignore these nodes
DISTINCT because more than one path will match the pattern
surfer will contain Allison, a friend of a friend who surfs
"""
def parseFreshness(freshness):
    """
    Parse a compact age string such as "4d", "12h" or "1d 2h 30m" into a
    datetime.timedelta.

    TODO: not fully general/bulletproof yet (no weeks/seconds support, and
    duplicate units silently overwrite each other).

    Raises:
        ValueError: when a piece ends in an unknown unit letter.  (ValueError
        is a subclass of Exception, so callers catching the old generic
        Exception still work.)
    """
    # Map unit suffix letters to timedelta keyword names.
    unit_names = {"d": "days", "h": "hours", "m": "minutes"}
    amounts = {"days": 0, "hours": 0, "minutes": 0}

    for piece in freshness.split():
        unit = piece[-1]
        value = int(piece[:-1])
        if unit not in unit_names:
            raise ValueError("unknown time unit %r in %r" % (unit, freshness))
        amounts[unit_names[unit]] = value

    return timedelta(**amounts)
class GitHubNeo:
    """
    Interface for a lazily-populated GitHub graph stored in neo4j.

    Supported operations:
      - update
      - list
      - track
      - discover
    """

    # ISO-8601-style timestamp format (second resolution, no timezone) used
    # for the createdDT/updatedDT node properties.
    _dtFormat = "%Y-%m-%dT%H:%M:%S"

    def __init__(self, config, githubHelper):
        """
        config       - mapping-like object holding the neo4j url/user/password
                       and the optional "neo.github.freshness" cache-age string
        githubHelper - helper used to resolve github names to github3 objects
        """
        # XXX get from config
        neo_url = config.get("neo.github.url")
        neo_user = config.get("neo.github.user")
        neo_passwd = config.get("neo.github.passwd")

        # TODO: derive from the url
        neo_host = "localhost:7474"

        # TODO: if freshness, parse it to a real latency (e.g., "4d" -> seconds)
        self.freshness = config.get("neo.github.freshness")

        if self.freshness:
            # freshness becomes a timedelta; see parseFreshness() above
            self.freshness = parseFreshness(self.freshness)
            pass

        py2neo.authenticate(neo_host, neo_user, neo_passwd)
        self.graph = py2neo.Graph(neo_url)

        self.githubHelper = githubHelper

        self._establishNeoSchema()

        return

    def _establishNeoSchema(self):
        """
        set up constraints on relationships and nodes in neo graph

        note: i believe that schema constraints are volatile, per-session
              if i don't apply these contraints, on a graph that had them
              in previous sessions, i can violate the previous contraints
        """
        schema = self.graph.schema

        try:
            schema.create_uniqueness_constraint("User", "name")
            schema.create_uniqueness_constraint("Organization", "name")
            schema.create_uniqueness_constraint("Repository", "name")
        # except py2neo.error.ConstraintViolationException:
        # NOTE(review): the bare except below also swallows connection/auth
        # failures, not just "constraint already exists" -- worth narrowing.
        except:
            # already established
            return

        # TODO: User
        # TODO: Organization
        # TODO: relationships

        return

    def query(self, query):
        """
        submit arbitrary cypher-syntax query to graph

        query is a string; yields result records one at a time
        """
        for record in self.graph.cypher.execute(query):
            yield record
        return

    def getNodeType(self, node):
        """
        return the type (label) of the given node
        """
        # XXX still figuring out LabelSet - don't know how to get values as list
        return node.labels.copy().pop()

    def getNode(self, nodeName, nodeType = None):
        """
        returns a neo Node, or None if no node with that name exists

        TODO: if nodeType specified, use it (esp User vs Organization)
        """
        # print("NT: %s" % nodeType)
        typeSpec = ""

        # XXX figure out best way to support this
        # if nodeType is not None:
        #     typeSpec = ":%s" % nodeType
        #     pass

        # XXX this does not feel like "The Best Way" to simply get a node
        # NOTE(review): nodeName is interpolated straight into the Cypher
        # string -- an injection risk if names ever come from untrusted input.
        query = 'MATCH (node {name:"%s"}%s) RETURN node' % ( nodeName, typeSpec )
        # print(" Q: %r" % query)

        records = list(self.query(query))

        if not records:
            return None

        if len(records) > 1:
            # NOTE(review): prints nodeType twice; nodeName was probably intended.
            print("XXX getNode() plural records: %s %s (%r)" % ( nodeType, nodeType, typeSpec ))
            for record in records:
                print(" R: %r" % ( record, ))
                pass
            # deliberate crash sentinel (raises NameError) to flag the state
            xxx
            pass

        record = records[0]

        return self.graph.hydrate(record[0])

    def _now(self):
        # UTC timestamp truncated to whole seconds (matches _dtFormat).
        return datetime.utcnow().replace(microsecond = 0)

    def _parseISODTStr(self, dtStr):
        # Inverse of _now().isoformat(): parse a stored timestamp string.
        return datetime.strptime(dtStr, self._dtFormat)

    def createNode(self, name, nodeType):
        """Create a new node with a createdDT stamp and persist it."""
        # we don't want the microsecond junk in time string
        now = self._now()
        node = Node(nodeType, name = name, createdDT = now.isoformat())
        self.graph.create(node)
        return node

    def getOrAddNode(self, name, nodeType = None):
        """Fetch the node by name, creating it (guessing its type) if absent."""
        node = self.getNode(name, nodeType)

        if node is not None:
            # print("# node already in graph")
            return node

        # print("# node not in graph - have to create")

        if nodeType is None:
            # ask the github helper to classify the name; only the first
            # result is consumed
            for nodeType, name, obj in self.githubHelper.getKindNameAndObject([ name, ]):
                # only one
                break

            if nodeType is None:
                print(" could not determine nodeType for name: %s" % name)
                # deliberate crash sentinel (raises NameError)
                xxx
                pass
            pass

        return self.createNode(name, nodeType)

    #
    # neo edge relationships for lists "away from" certain types of github objects
    #
    # some list names have an alias, because the list name is confusing
    #
    # TODO: test if simple rule of removing the final "s" works. that would be simpler
    #       - there are a couple of exceptions
    #
    # Each entry: ( github list attribute, neo relationship label, direction )
    _relationships = {
        "Repository" : [
            ( "stargazers", "starred", "from", ),
            ( "subscribers", "subscriber", "from", ),
            ( "contributors", "contributor", "from", ),
            ( "forks", "forkOf", "from", ),
            # ...
        ],

        "User" : [
            ( "followers", "follows", "from", ),
            ( "following", "follows", "to", ),
            ( "starred_repositories", "starred", "to", ),
            ( "subscriptions", "subscriber", "to", ),
            ( "organizations", "memberOf", "to", ),
        ],
    }

    def updateRelationships(self, obj, slot, relationshipLabel, direction, destNode):
        """
        obj is a github3 object (repository, user, organization)

        direction is "from" or "to"

        TODO: support attribute decorators

        generates stream of entitySpec ( neighborNodeType, nodeName ) tuples
        """
        # NOTE(review): destNodeType is computed but never used below.
        destNodeType = self.getNodeType(destNode)
        graph = self.graph

        print("updateRelationships: %-25s - %-25s %4s - %s" % (
            slot, relationshipLabel, direction, destNode.properties["name"] ))

        # XXX need otherNodeLabelGetter
        # - .name, .login, ...

        # determine neighbor nodeType by slot name
        # TODO: use a dictionary - simpler
        neighborNodeType = None

        # XXX just figure this out by what we get back
        if slot in [ "stargazers", "contributors", ]:
            neighborNodeType = "User"
        elif slot in [ "followers", "following" ]:
            neighborNodeType = "User"
        elif slot == "organizations":
            neighborNodeType = "Organization"
        elif slot == "starred_repositories":
            neighborNodeType = "Repository"
        elif slot == "subscriptions":
            neighborNodeType = "Repository"
        elif slot == "forks":
            neighborNodeType = "Repository"
        elif slot == "subscribers":
            # i think that things can subsribe to users or orgs, too
            # this is currently just Users subscribed to Repository
            neighborNodeType = "User"
        else:
            print(" XXX slot not handled in switch yet - %r" % slot)
            # deliberate crash sentinel (raises NameError)
            xxx
            pass

        # github attribute holding the neighbor's unique name, per node type
        if neighborNodeType == "User":
            nodeNameAttr = "login"
        elif neighborNodeType == "Organization":
            nodeNameAttr = "name"
        elif neighborNodeType == "Repository":
            nodeNameAttr = "full_name"
        else:
            # deliberate crash sentinel (raises NameError)
            xxx
            pass

        # print("# nodeNameAttr: %s - %s - %s" % ( slot, neighborNodeType, nodeNameAttr ))

        # TODO: get all of them, and batch-update
        neighbors = []

        for value in getattr(obj, slot)():

            # if slot == "forks":
            #     print("  fork obj")
            #     github_utils.dumpSlotValues(obj)

            # value is another github object (User, ...)
            # name = value.name
            # name = str(value)
            nodeName = getattr(value, nodeNameAttr)
            neighbors.append(( neighborNodeType, nodeName ))
            pass

        # TODO: batch-update
        neighbors = sorted(neighbors, key = lambda _tuple : _tuple[1])

        for neighborNodeType, nodeName in neighbors:

            # only if verbose tracing
            # print("    %s: %r" % ( relationshipLabel, nodeName ))

            srcNode = Node(neighborNodeType, name = nodeName)

            # graph.merge_one(Relationship(srcNode, relationshipLabel, destNode))

            # XXX try/except is sloppy - i don't get merge vs create yet
            if direction == "from":
                relationship = Relationship(srcNode, relationshipLabel, destNode)
            else:
                relationship = Relationship(destNode, relationshipLabel, srcNode)
                pass

            try:
                graph.create(relationship)
            except:
                # already exists
                pass

            yield ( neighborNodeType, nodeName )
            pass

        # need to flush anything?

        return

    def _getRelationshipTuples(self, nodeType, relationships = None):
        """
        Yield ( listName, relationshipLabel, direction ) tuples for nodeType,
        optionally filtered to the given relationship labels.

        TODO: memoize
        """
        # print("_getRelationshipTuples(): %s  %s" % ( nodeType, relationships ))

        #
        # XXX still working out the best way to normalize github
        # relationships to neo relationships
        #
        for relationshipTuple in self._relationships[nodeType]:

            listName, relationshipLabel, direction = relationshipTuple

            if relationships:
                keep = False
                for rel in relationships:
                    if rel == relationshipLabel:
                        keep = True
                        break
                    pass
                if not keep:
                    continue
                pass

            yield relationshipTuple
            pass

        return

    def updateGithubObj(self, githubObj, node, relationships = None):
        """
        githubObj is a GitHub3 Repository, User, or Organization

        node is a py2neo Node

        Generator: re-yields every entitySpec produced while refreshing the
        node's relationships, then stamps the node's updatedDT.
        """
        # starting to generalize
        graph = self.graph
        nodeType = self.getNodeType(node)

        # note that full_name is something that i attach
        if nodeType == "Repository":
            name = githubObj.full_name
        else:
            name = githubObj.name
            pass

        # TODO: want to report different things for different object -
        #       user needs login and name
        print("GitHubNeo.updateGithubObj(): %s" % name.encode("utf8"))

        relationshipTuples = list(self._getRelationshipTuples(nodeType, relationships))

        # TODO: *local* LRU cache user and repo - may also be on contributes, subscribes.
        #       make sure we only pay github points once
        for listName, relationshipLabel, direction in relationshipTuples:

            for entitySpec in self.updateRelationships(githubObj,
                                                       listName,
                                                       relationshipLabel,
                                                       direction,
                                                       node):
                yield entitySpec
                pass
            pass

        node.properties["updatedDT"] = datetime.utcnow().replace(microsecond = 0)
        node.push()

        # what else should go in graph?
        #   .parent
        #   .source
        #   .description
        #   .homepage
        #   .language
        #   .last_modified
        #   .updated_at
        # branches()
        # code_frequency()
        # XXX blocked - requires authentication
        # dumpList(obj, "collaborators")
        # comments()
        # commit_activity()
        # commits()
        # contributor_statistics()
        # github_utils.dumpList(obj, "contributors")
        # default_branch
        # deployments() ???
        # events()
        # github_utils.dumpList(obj, "forks")
        # hooks() ???
        # issues()
        # keys() ???
        # labels() ??? i think these are tags used in issues/planning
        # github_utils.dumpList(obj, "languages")
        # milestones()
        # notifications()
        # open_issues_count ???
        # owner (a User object)
        # pull_requests
        # refs() ???
        # releases() ???
        # size (what are units?)
        # statuses() ?
        # github_utils.dumpList(obj, "subscribers")
        # tags()
        # i think that tree is some sort of file tree. (i was hoping it was fork ancestry)
        # tree = obj.tree()
        # print("TREE: %s" % tree)
        # teams()
        # { "Last-Modified": "",
        #   "all": [0, 0, 1, 1, ..., (52 weeks?) ],
        #   "owner": [ 0, 0, 0, 0, ... ] }
        # print(" weekly_commit_count: %s" % obj.weekly_commit_count())

        return

    def updateUser(self, githubObj, node):
        """
        user is a GitHub3 User

        TODO: refactor - merge with updateRepository - just a generic

        NOTE(review): this method is broken as written -- it references an
        undefined name ``user`` (the parameter is ``githubObj``), so calling
        it raises NameError.  It also unpacks 2-tuples from _relationships,
        whose entries are 3-tuples.  Kept verbatim; updateGithubObj() is the
        working generic path.
        """
        graph = self.graph
        nodeType = self.getNodeType(node)

        # was it forked from something?
        print("GitHubNeo.updateUser(): %s - %r" % ( user.login, user.name ))

        nodeType = "User"
        node = Node(nodeType, name = user.login)

        # use merge_one to create if it does not already exist
        # XXX merge_one does not persist the node?
        # graph.merge_one(node)
        graph.create(node)

        for listNameTuple in self._relationships[nodeType]:

            if isinstance(listNameTuple, tuple):
                listNameTuple, relationshipLabel = listNameTuple
            else:
                listName = listNameTuple
                relationshipLabel = listName
                pass

            for entitySpec in self.updateRelationships(user, listName, relationshipLabel, node):
                yield entitySpec
                pass
            pass

        return

    def updateOrganization(self, name, org):
        """
        org is a GitHub3 Organization

        NOTE(review): unfinished stub -- references undefined names ``user``,
        ``obj`` and ``dumpList`` (and contains an ``xxx`` crash sentinel), so
        calling it raises NameError.  Kept verbatim.
        """
        graph = self.graph

        # was it forked from something?
        print("GitHubNeo.updateOrg(): %s" % user)
        # deliberate crash sentinel (raises NameError)
        xxx

        print(" bio: %s" % obj.bio)
        print(" company: %s" % obj.company)
        print(" location: %s" % obj.location)

        dumpList(obj, "public_members")
        dumpList(obj, "repositories")

        return

    def _nodeFreshEnough(self, node):
        # True when the node's updatedDT stamp is within self.freshness of now.
        updatedDTStr = node.properties.get("updatedDT")
        if updatedDTStr:
            age = self._now() - self._parseISODTStr(updatedDTStr)
            if age <= self.freshness:
                return True
            pass
        return False

    def _getCachedNeighbors(self, node, relationships = None):
        """
        Yield neighbor tuples for node straight from the neo graph, without
        touching github (used when the node is still fresh).

        NOTE(review): yields 3-tuples ( type, name, 1 ) whereas
        updateRelationships yields 2-tuples -- callers treat both as opaque
        entitySpecs, but the inconsistency is worth confirming.
        """
        nodeType = self.getNodeType(node)

        # print("  _getCachedNeighbors(): %-12s %s" % ( nodeType, node.properties["name"] ))

        if relationships is None:
            #
            # list of ( githubSlot, neoRelationLabel )
            #
            relationships = self._relationships[nodeType]
            neoRelationships = []
            for relationshipInfo in relationships:
                if isinstance(relationshipInfo, tuple):
                    neoRelationship = relationshipInfo[1]
                else:
                    neoRelationship = relationshipInfo
                    pass
                neoRelationships.append(neoRelationship)
                pass
            pass
        else:
            #
            # map relationships in to neo relationships
            #
            neoRelationships = relationships
            pass

        i = 1

        for neoRelationship in neoRelationships:

            # print("  neoRelationship: %s" % neoRelationship)

            neighbors = []

            for rel in node.match(neoRelationship):
                # the neighbor is whichever end of the edge is not `node`
                if node == rel.start_node:
                    neighborNode = rel.end_node
                else:
                    neighborNode = rel.start_node
                    pass
                # XXX expensive. we can already know this, from the neoRelationship
                neighborNodeType = self.getNodeType(neighborNode)
                # yield neighborNodeType, neighborNode.properties["name"]
                neighbors.append(( neighborNodeType, neighborNode.properties["name"], 1 ))
                i += 1
                pass

            if neighbors:
                print(" %5d %s" % ( len(neighbors), neoRelationship ))
                pass

            # XXX optional. user may want to sort by something else
            #     (added date - but that's not supported yet)
            neighbors = sorted(neighbors, key = lambda _tuple : _tuple[1])

            for neighborTuple in neighbors:
                yield neighborTuple
                pass
            pass

        return

    def update(self, entitySpecs, numHops = None, relationships = None):
        """update the edges/relationships around the specified node names

        creates the nodes if they don't already exist

        entitySpecs is list of github names - Repository, User,
        Organization. can by type-hinted - org:..., user:...,
        repo:..., or else we guess, using the helper

        if staleness constraints specified, will use what's in cache
        if new enough (to save github points)

        TODO: maybe able to specify only certain relationship types to update
        """
        if numHops is None:
            numHops = 1
            pass

        hop = 1

        # list of entities left to check, and their hop
        #
        # a seed is at hop 1 (versus 0)
        boundary = []
        for entitySpec in entitySpecs:
            boundary.append(( entitySpec, hop ))
            pass

        helper = self.githubHelper
        freshness = self.freshness

        # breadth-first expansion of the boundary, up to numHops
        while boundary:

            # NOTE(review): boundary[1:] re-copies the list on every pop --
            # O(n) per step; a collections.deque would make this O(1).
            entitySpec, _hop = boundary[0]
            boundary = boundary[1:]

            print("GitHubNeo.update: %s %5d %5d %s" %
                  ( _hop, len(boundary), helper.checkRatePointsLeft(), entitySpec, ))

            nodeType = None
            extra = None

            # entitySpec may be a bare name or a ( type, name[, extra] ) tuple
            if isinstance(entitySpec, tuple):
                nodeType = entitySpec[0]
                name = entitySpec[1]
                if len(entitySpec) > 2:
                    extra = entitySpec[2]
                    pass
                pass
            else:
                name = entitySpec
                pass

            node = self.getOrAddNode(name, nodeType)
            nodeType = self.getNodeType(node)

            if freshness is not None and self._nodeFreshEnough(node):
                # print("  using cached relationships: %s - %s" % ( nodeType, name ))
                neighbors = self._getCachedNeighbors(node, relationships = relationships)
            else:
                githubObj = helper.getGithubObject(name, nodeType)
                neighbors = self.updateGithubObj(githubObj, node, relationships = relationships)
                pass

            # need to drain the stream, even if we don't add them to boundary
            neighbors = list(neighbors)

            if _hop < numHops:
                for _entitySpec in neighbors:
                    boundary.append(( _entitySpec, _hop + 1 ))
                    # print("  added to boundary: %s %s" % ( _hop + 1, _entitySpec ))
                    pass
                pass

            # print("")
            pass

        return

    pass
| 26.892344 | 96 | 0.526644 | 19,959 | 0.887777 | 11,244 | 0.500133 | 0 | 0 | 0 | 0 | 8,963 | 0.398674 |
472b7eae05711c0a1df1ff938d239d30340dd29a | 60 | py | Python | tests/dummy_cell_env.py | pktl/distributed_game_of_life | 9ac63cec43977fff2653e7a1b9c7a4c12bbd7925 | [
"MIT"
] | null | null | null | tests/dummy_cell_env.py | pktl/distributed_game_of_life | 9ac63cec43977fff2653e7a1b9c7a4c12bbd7925 | [
"MIT"
] | null | null | null | tests/dummy_cell_env.py | pktl/distributed_game_of_life | 9ac63cec43977fff2653e7a1b9c7a4c12bbd7925 | [
"MIT"
] | null | null | null | from src.dgol_worker.cell_env import CellEnv
ce = CellEnv() | 20 | 44 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
472bc55d336ce348951bd8eeba92c1a80bba3988 | 526 | py | Python | webServ/app.py | harryparkdotio/dabdabrevolution | ffb8a28379edd5e44d23d39ee5e5cc38ddf5181a | [
"MIT"
] | 9 | 2017-01-10T03:40:18.000Z | 2019-03-20T11:27:22.000Z | webServ/app.py | harryparkdotio/dabdabrevolution | ffb8a28379edd5e44d23d39ee5e5cc38ddf5181a | [
"MIT"
] | null | null | null | webServ/app.py | harryparkdotio/dabdabrevolution | ffb8a28379edd5e44d23d39ee5e5cc38ddf5181a | [
"MIT"
] | 3 | 2018-02-28T04:10:55.000Z | 2018-12-21T14:19:12.000Z | from flask import Flask, Response, send_from_directory
import random, time
app = Flask(__name__, static_folder='www')
@app.route('/')
def index():
    """Root endpoint; returns an empty response body (page content is
    expected to come from /static and the /stream event source)."""
    return ''
@app.route('/stream')
def stream():
    """Server-Sent Events endpoint emitting one random letter per second.

    Returns a streaming ``text/event-stream`` response whose generator
    yields a single ``data:`` field per event.
    """
    def event():
        while True:
            # SSE messages must end with a blank line ("\n\n"); the original
            # emitted the literal text "nn" (lost backslashes), which no
            # EventSource client would ever parse as a complete event.
            yield "data: " + random.choice(['a', 'b', 'c', 'd']) + "\n\n"
            # NOTE(review): sleeping inside an app_context per event looks
            # unnecessary for time.sleep — kept as in the original.
            with app.app_context():
                time.sleep(1)
    return Response(event(), mimetype="text/event-stream")
@app.route('/static/<path:path>')
def static_f(path):
    """Serve *path* from the app's configured static folder ('www')."""
    return app.send_static_file(path)
if __name__ == '__main__':
app.run(debug=True) | 22.869565 | 62 | 0.671103 | 0 | 0 | 205 | 0.389734 | 354 | 0.673004 | 0 | 0 | 93 | 0.176806 |
472bdb7d6f77fde9bd50d0c0ca58b1a849377617 | 1,039 | py | Python | scripts/jandan_crawler.py | 920671233/SpiderItems | cf615de5f496083e43ca067200318d4a49f79cab | [
"Apache-2.0"
] | null | null | null | scripts/jandan_crawler.py | 920671233/SpiderItems | cf615de5f496083e43ca067200318d4a49f79cab | [
"Apache-2.0"
] | null | null | null | scripts/jandan_crawler.py | 920671233/SpiderItems | cf615de5f496083e43ca067200318d4a49f79cab | [
"Apache-2.0"
] | null | null | null | import re
import urllib.request
from urllib.error import URLError
def crawl():
    """Download thumbnail images from the first page of jandan.net.

    Scrapes the page HTML with regular expressions (fragile: any markup
    change on the site breaks the patterns) and saves each matched .jpg
    to the hard-coded Windows directory E:/img/.
    """
    html = urllib.request.urlopen('http://jandan.net/page/1').read()
    # str() on bytes yields the repr ("b'...'"); the regexes below are
    # written against that string form.
    html = str(html)
    pattern1 = '<div id="content">.*<div class="post f" style="padding-left:210px;">'
    result1 = re.compile(pattern1).findall(html)
    # Assumes the content block is always present — raises IndexError otherwise.
    result1 = result1[0]
    pattern2 = '<img src="//(.+?\.jpg)!custom" width="175" height="98" />'
    imglist = re.compile(pattern2).findall(result1)
    # Add request header information (present a browser User-Agent so the
    # site does not reject the download requests).
    opener = urllib.request.build_opener()
    opener.addheaders = [("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36")]
    urllib.request.install_opener(opener)
    # Sequential file counter used to name the downloads 1.jpg, 2.jpg, ...
    x = 1
    for imgurl in imglist:
        imgname = "E:/img/" + str(x) + ".jpg"
        imgurl = "http://" + imgurl
        try:
            urllib.request.urlretrieve(imgurl, imgname, )
        except URLError as e:
            print(e)
        finally:
            # Advance the counter even on failure so names stay unique.
            x += 1
if __name__ == '__main__':
    crawl()
| 32.46875 | 159 | 0.60924 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 341 | 0.323837 |
472ccb20b84f3fb1960515f43b725476c316bd05 | 212 | py | Python | SRC/Chapter_03-When-Object-Are-Alike/supplier.py | eminemence/python3_OOPs | a61f2649cf9c68a782a3e90c1f667877597dfb1d | [
"MIT"
] | null | null | null | SRC/Chapter_03-When-Object-Are-Alike/supplier.py | eminemence/python3_OOPs | a61f2649cf9c68a782a3e90c1f667877597dfb1d | [
"MIT"
] | null | null | null | SRC/Chapter_03-When-Object-Are-Alike/supplier.py | eminemence/python3_OOPs | a61f2649cf9c68a782a3e90c1f667877597dfb1d | [
"MIT"
] | 1 | 2021-01-13T08:25:04.000Z | 2021-01-13T08:25:04.000Z | import contact
class Supplier(contact.Contact):
    """A contact that can additionally be sent supply orders."""

    def order(self, order):
        """
        Pretend to send the given order to this supplier.

        @param order: a description of the goods being ordered
        """
        # Adjacent string literals are concatenated at compile time; the
        # original lacked the trailing space after "send", printing
        # "...we would send{} order...".
        print(
            "If this were a real system we would send "
            "{} order to {}".format(order, self.name)
        )
| 21.2 | 54 | 0.566038 | 194 | 0.915094 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.273585 |
472d33ee671b834600d631e1bc6cb1211dc6683a | 1,356 | py | Python | gamestate/Main.py | skitter30/modbot | 6526f5bd1b8e73143ff45d71081d64c6e4aa8ea5 | [
"MIT"
] | 2 | 2019-12-29T13:15:13.000Z | 2020-04-27T07:32:21.000Z | gamestate/Main.py | skitter30/modbot | 6526f5bd1b8e73143ff45d71081d64c6e4aa8ea5 | [
"MIT"
] | 9 | 2018-05-02T14:39:48.000Z | 2018-05-27T22:08:53.000Z | gamestate/Main.py | skitter30/modbot | 6526f5bd1b8e73143ff45d71081d64c6e4aa8ea5 | [
"MIT"
] | 6 | 2018-05-08T19:48:35.000Z | 2018-06-03T02:58:55.000Z | from GameState import GameState, Game
from Component import Component
def main():
    # Testing code
    """
    game = Game('large_normal_186')
    game.load_events()
    game.process_events()
    print(game.generate_vote_count(278))
    print('\nNow in RC style!\n')
    print(game.generate_vote_count(278, style='large_normal_186'))
    my_list = Component.create('players_list', game_state=game.game_state, post=game.game_state.post)
    print(my_list.generate())
    print('\nLiving Players:\n')
    my_list = Component.create('players_list', game_state=game.game_state, post=game.game_state.post, filter='living')
    print(my_list.generate())
    print('\nDead Players:\n')
    my_list = Component.create('players_list', game_state=game.game_state, post=game.game_state.post, filter='dead')
    print(my_list.generate())
    print('\nModkilled Players:\n')
    my_list = Component.create('players_list', game_state=game.game_state, post=game.game_state.post, filter='modkilled')
    print(my_list.generate())
    print('breakpoint')
    """
    # Load and replay the game's events, then dump every vote count of
    # every election in 'Micc' style.
    game = Game('mini_theme_1974')
    game.load_events()
    game.process_events()
    for contest in game.game_state.elections:
        for count_id in contest.vote_counts:
            rendered = game.generate_vote_count(count_id, style='Micc')
            print(rendered + '\n\n')
    print('breakpoint')
if __name__ == '__main__':
    main()
| 35.684211 | 121 | 0.69764 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,007 | 0.742625 |
472e95ac4a9261e3fcd32bd608de33687d066d95 | 5,772 | py | Python | src/enviroment.py | Menoita99/Reinforcement-Learning-Based-Encoder-Decoder-Implementation- | ed67b4ef6e23ee403ca43dc061c94301f1c685af | [
"MIT"
] | 1 | 2021-07-11T13:12:08.000Z | 2021-07-11T13:12:08.000Z | src/enviroment.py | Menoita99/Reinforcement-Learning-Based-Encoder-Decoder-Implementation- | ed67b4ef6e23ee403ca43dc061c94301f1c685af | [
"MIT"
] | 1 | 2022-03-24T02:15:30.000Z | 2022-03-24T02:15:30.000Z | src/enviroment.py | Menoita99/Reinforcement-Learning-Based-Encoder-Decoder-Implementation- | ed67b4ef6e23ee403ca43dc061c94301f1c685af | [
"MIT"
] | 1 | 2021-07-11T13:12:08.000Z | 2021-07-11T13:12:08.000Z | import enum
import random
import pandas as pd
class Actions(enum.IntEnum):
    """Trading actions: open a long/short position, do nothing, or close."""
    Buy = 0
    Sell = 1
    Noop = 2
    Close = 3

    @staticmethod
    def random():
        """Return one of the four actions chosen uniformly at random."""
        # Member values are exactly 0..3, so the enum constructor replaces
        # the original if/elif ladder.
        return Actions(random.randint(0, 3))
class Environment:
    """A forex-trading RL environment driven by candles loaded from CSV.

    State transitions mutate instance fields in a fixed order (trade before
    reward, reward caches prevMoney), so the method bodies below are kept
    exactly as written.
    """
    def __init__(self, useWindowState=False, windowSize=10, market="EUR_USD", initialMoney=1000,seed=1,dataSrc="data/close ema200 ema50 ema13 ema7 macd_macd macd_signal.csv"):
        # seed for the episode-start randomisation in reset()
        self.seed = seed
        self.market = market
        # if True, states are windows of the last windowSize candles
        self.useWindowState = useWindowState
        self.windowSize = windowSize
        self.dataSrc = dataSrc
        self.initialMoney = initialMoney
        self.money = initialMoney
        # episode terminates when equity drops to 25% of the initial stake
        self.minimumMoney = initialMoney * 0.25
        self.leverage = 30
        self.prevMoney = initialMoney
        self.prevAction = Actions.Noop
        self.currentCandle = 0
        # one-shot penalty added to the next reward (set by invalid trades)
        self.penalty = 0
        # open position: [action, entry price, units, pipete value] or None
        self.position = None
        # number of steps taken in the current episode
        self.count=0
        self._loadData()
    def _loadData(self):
        # Each row becomes a list; column 0 is treated as the close price
        # elsewhere (see dataSrc filename) — TODO confirm column order.
        self.states = pd.read_csv(self.dataSrc,sep=";").values.tolist()
    def reset(self):
        """Reset the episode and return the initial state."""
        self.prevAction = Actions.Noop
        self.penalty = 0
        self.count = 0
        self.position = None
        self.money = self.initialMoney
        self.prevMoney = self.initialMoney
        # NOTE: reseeding here makes every episode start at the same candle
        # for a fixed seed.
        random.seed(self.seed)
        # Start somewhere in the first half of the data, leaving room for a
        # look-back window when useWindowState is set.
        self.currentCandle = random.randint(0 if not self.useWindowState else self.windowSize, int(len(self.states) / 2))
        self.currentPrice = self.states[self.currentCandle][0]
        return self.generateState()
    def addPositionToCandle(self, candle):
        """Return a copy of the candle with a position flag appended
        (0 = flat, 1 = long, -1 = short)."""
        cdl = candle.copy()
        cdl.append(0 if self.position is None else 1 if self.position[0] == Actions.Buy else -1)
        return cdl
    def generateState(self):
        """Build the observation for the current candle (optionally a
        window of the last windowSize candles) and cache currentPrice."""
        state = self.addPositionToCandle(self.states[self.currentCandle])
        self.currentPrice = state[0]
        if self.useWindowState:
            windowState = []
            for i in range(-self.windowSize+1,0):
                windowState.append(self.addPositionToCandle(self.states[i+self.currentCandle]))
            windowState.append(state)
            return windowState
        return state
    def step(self, action):
        """Advance one candle, apply the action, and return
        (state, reward, done, [current equity])."""
        self.currentCandle += 1
        state = self.generateState()
        # print("-----------------------------------------------------------------------------------------------------------------------")
        # print(str(self.getCurrentMoney(self.currentPrice)) + "   " + str(self.position))
        self.trade(action, self.currentPrice)
        # Episode ends when data runs out or equity hits the floor.
        stop = self.currentCandle + 1 >= len(self.states) or self.getCurrentMoney(self.currentPrice) <= self.minimumMoney
        self.prevAction = action
        output = state,self.reward(action, self.currentPrice), stop, [self.getCurrentMoney( self.currentPrice)]
        # print(str(self.currentCandle) + "-> " + str(action) + "  " + str(output))
        # print(str(self.getCurrentMoney(self.currentPrice)) + "   " + str(self.position))
        # if stop:
        #     print("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
        if stop:
            print(self.count)
        else:
            self.count += 1
        return output
    def getCurrentMoney(self,price):
        """Return current equity: cash when flat, otherwise the open
        position marked to market at *price*."""
        if self.position is None:
            return self.money
        else:
            positionAction, positionPrice, units, pipeteValue = self.position
            # Profit/loss in pipetes (1/100,000 of a price unit).
            if positionAction == Actions.Buy:
                pipetes = int((price - positionPrice) * 100_000)
            if positionAction == Actions.Sell:
                pipetes = int((positionPrice - price) * 100_000)
            return units / self.leverage + pipetes * pipeteValue
    def reward(self, action, price):
        """Return equity change since the previous step, minus a small
        idle cost when flat, plus any pending penalty (then cleared)."""
        reward = self.getCurrentMoney(price) - self.prevMoney - (.5 if self.position is None else 0) + self.penalty
        self.prevMoney = self.getCurrentMoney(price)
        self.penalty = 0
        return reward
    def trade(self, action, price):
        """Apply *action* at *price*: open, close, or reverse the position.
        Invalid actions set a -10 penalty instead of raising."""
        if action == Actions.Noop:
            return
        if self.position is None:
            if action == Actions.Close:
                # Nothing to close — penalise instead of raising.
                self.penalty = -10
                return
                #raise Exception("There is no operation to close")
            self.position = [action, price, self.money * self.leverage, self.money * self.leverage * 0.00001 / price]
        else:
            positionAction, positionPrice, units, pipeteValue = self.position
            if action == positionAction:
                # Already holding a position in this direction.
                self.penalty = -10
                return
                #raise Exception("Operation have the same action " + str(action))
            elif action == Actions.Close:
                if positionAction == Actions.Buy:
                    pipetes = int((price - positionPrice) * 100_000)
                if positionAction == Actions.Sell:
                    pipetes = int((positionPrice - price) * 100_000)
                self.money = units / self.leverage + pipetes * pipeteValue
                self.position = None
            else:
                # Opposite action: realise the current position, then open
                # a new one in the other direction.
                if action == Actions.Sell: # close buy operation
                    pipetes = int((price - positionPrice) * 100_000)
                if action == Actions.Buy: # close sell operation
                    pipetes = int((positionPrice - price) * 100_000)
                self.money = units / self.leverage + pipetes * pipeteValue
                self.position = [action, price, self.money * self.leverage, self.money * self.leverage * 0.00001 / price]
| 31.889503 | 175 | 0.560638 | 5,717 | 0.990471 | 0 | 0 | 279 | 0.048337 | 0 | 0 | 760 | 0.13167 |
472ec64557b8a07de29a23ddc1a024b5551b7046 | 2,225 | py | Python | setup.py | MarkusH/django-osm-field | b7b8f6bd324a3dc29924962c303a6c4166840de4 | [
"MIT"
] | 23 | 2015-10-13T05:55:37.000Z | 2021-12-09T02:43:59.000Z | setup.py | MarkusH/django-osm-field | b7b8f6bd324a3dc29924962c303a6c4166840de4 | [
"MIT"
] | 18 | 2015-04-21T16:35:21.000Z | 2021-10-10T14:39:14.000Z | setup.py | MarkusH/django-osm-field | b7b8f6bd324a3dc29924962c303a6c4166840de4 | [
"MIT"
] | 13 | 2015-04-28T22:34:55.000Z | 2022-03-28T16:03:42.000Z | import setuptools
# Read the long description explicitly as UTF-8 so installation does not
# depend on the machine's locale (markdown READMEs commonly contain
# non-ASCII characters).
with open("README.md", "r", encoding="utf-8") as f:
    long_description = f.read()

setuptools.setup(
    name="django-osm-field",
    author="Markus Holtermann",
    author_email="info@markusholtermann.eu",
    description="Django OpenStreetMap Field",
    license="MIT",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/MarkusH/django-osm-field",
    project_urls={
        "CI": "https://github.com/MarkusH/django-osm-field/actions",  # noqa
        "Changelog": "https://github.com/MarkusH/django-osm-field/blob/main/CHANGELOG.md",  # noqa
        "Issues": "https://github.com/MarkusH/django-osm-field/issues",  # noqa
    },
    packages=setuptools.find_packages(
        exclude=[
            "*.example",
            "*.example.*",
            "example.*",
            "example",
            "*.tests",
            "*.tests.*",
            "tests.*",
            "tests",
        ],
    ),
    include_package_data=True,
    install_requires=["Django>=2.2"],
    extras_require={
        "dev": ["pre-commit"],
        "docs": [
            "Django",
            "sphinx_rtd_theme",
            "Sphinx>=3.0,<3.4",
        ],
        "test": [
            "coverage[toml]>=5,<6",
            "Django",
        ],
    },
    # PEP 508 specifier sets are comma-separated; the original ">=5<6" is
    # not a valid version specifier (compare "coverage[toml]>=5,<6" above).
    setup_requires=["setuptools_scm>=5,<6"],
    use_scm_version=True,
    keywords="OpenStreetMap, OSM, Django, Geo, Geoposition",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Framework :: Django",
        "Framework :: Django :: 2.2",
        "Framework :: Django :: 3.0",
        "Framework :: Django :: 3.1",
        "Framework :: Django :: 3.2",
        "Framework :: Django :: 4.0",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
    ],
    python_requires=">=3.5",
)
)
| 31.785714 | 98 | 0.545169 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,223 | 0.549663 |
4730a28366b706a90435534ccea87bb676932eaa | 24,143 | py | Python | src/tools/bin/archive_to_arkivum.py | aherbert/omero-archiving | ce52a63215ea9b75a50d35ee9690fe37ee74794a | [
"0BSD"
] | 1 | 2021-07-08T11:03:34.000Z | 2021-07-08T11:03:34.000Z | src/tools/bin/archive_to_arkivum.py | aherbert/omero-archiving | ce52a63215ea9b75a50d35ee9690fe37ee74794a | [
"0BSD"
] | 1 | 2022-01-20T12:24:43.000Z | 2022-01-20T17:48:01.000Z | src/tools/bin/archive_to_arkivum.py | aherbert/omero-archiving | ce52a63215ea9b75a50d35ee9690fe37ee74794a | [
"0BSD"
] | null | null | null | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted.
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# ------------------------------------------------------------------------------
"""
This script processes all the files listed in the archive register. A checksum
is computed, the file copied to Arkivum, and a checksum made to confirm
the copy. The original is removed when the file has been confirmed as ingested
by Arkivum and a symbolic link is made from the original location to the
archived file.
"""
# Standard library
import sys
import os
import shutil
import time
import configparser
import hashlib
import urllib
import urllib.parse

# Third-party
import requests
# Get rid of the Unverified HTTPS request warning
try:
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
except:
pass
from stat import *
from zlib import adler32
import re
import platform
import smtplib
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import COMMASPACE, formatdate
from optparse import OptionParser, OptionGroup
import gdsc.omero
###############################################################################
def init_options():
    """Build and return the command-line option parser for the archiver."""
    parser = OptionParser(
        usage="usage: %prog [options] list",
        description="Program to archive files to Arkivum",
        add_help_option=True,
        version="%prog 1.0",
    )

    # (flag, dest, default, help) tuples for the Archive group.
    archive_options = (
        ("--archive_log", "archive_log", gdsc.omero.ARCHIVE_LOG,
         "Directory for archive logs [%default]"),
        ("--archive_job", "archive_job", gdsc.omero.ARCHIVE_JOB,
         "Directory for archive jobs [%default]"),
        ("--arkivum_root", "arkivum_root", gdsc.omero.ARKIVUM_ROOT,
         "Arkivum root (for the mounted appliance) [%default]"),
        ("--arkivum_path", "arkivum_path", gdsc.omero.ARKIVUM_PATH,
         "Arkivum path (directory to copy files) [%default]"),
        ("--to_archive", "to_archive", gdsc.omero.TO_ARCHIVE_REGISTER,
         "To-Archive register [%default]"),
        ("--archived", "archived", gdsc.omero.ARCHIVED_REGISTER,
         "Archived register [%default]"),
    )
    group = OptionGroup(parser, "Archive")
    for flag, dest, default, help_text in archive_options:
        group.add_option(flag, dest=dest, default=default, help=help_text)
    parser.add_option_group(group)

    group = OptionGroup(parser, "Arkivum")
    # Decide if this should be:
    # amber (copied to data centres)
    # green (tape sent to escrow)
    group.add_option("--state", dest="state",
                     default='green',
                     help="Replication state for deletion [%default]")
    parser.add_option_group(group)
    return parser
###############################################################################
def log(msg):
    """
    Print an informational message to stdout.

    @param msg: The message
    """
    print(str(msg))
def error(msg):
    """
    Print an error message, prefixed with 'ERROR:', to stdout.

    @param msg: The message
    """
    print("ERROR: %s" % (msg,))
def fatal(msg):
    """
    Print a fatal error message, prefixed with 'FATAL:', to stdout.

    @param msg: The message
    """
    print("FATAL: %s" % (msg,))
def die(msg):
    """
    Print a fatal error and terminate the process with exit status 1.

    @param msg: The message
    """
    fatal(msg)
    # Equivalent to sys.exit(1): raise SystemExit with code 1.
    raise SystemExit(1)
###############################################################################
def validate_email(userEmail):
    """
    Check that the string looks like a valid e-mail address.

    @param userEmail: The e-mail address
    @return: an re.Match object when plausible, otherwise None
    """
    # Validate with a regular expression. Not perfect but it will do.
    # The dot before the TLD is escaped; the original unescaped '.' matched
    # any character, accepting addresses such as "a@bXcom".
    return re.match(r"^[a-zA-Z0-9._%-]+@[a-zA-Z0-9._%-]+\.[a-zA-Z]{2,6}$",
                    userEmail)
def send_email(userEmail, job_file, result):
    """
    E-mail the archive job result, with the job file attached, via the
    local SMTP server. Admin addresses always receive a copy; the user is
    included only when userEmail passes validate_email().

    @param userEmail: The e-mail address
    @param job_file : The job file
    @param result : The result status
    """
    send_to = []
    # Comment this out to prevent admin receiving all the emails
    send_to.extend(gdsc.omero.ADMIN_EMAILS)
    if validate_email(userEmail):
        send_to.append(userEmail)
    if not send_to:
        # No valid recipients: silently do nothing.
        return
    name = os.path.basename(job_file)
    msg = MIMEMultipart()
    msg['From'] = gdsc.omero.ADMIN_EMAIL
    msg['To'] = COMMASPACE.join(send_to)
    msg['Date'] = formatdate(localtime=True)
    msg['Subject'] = '[OMERO Job] Archive Job : ' + result
    msg.attach(MIMEText("""OMERO Archive Job : %s
Result : %s
Your archive job file is attached.
---
OMERO @ %s """ % (name, result, platform.node())))
    # Attach the raw job file, renamed with a .txt suffix so mail clients
    # display it inline.
    with open(job_file, "rb") as f:
        name = name + '.txt'
        part = MIMEApplication(
            f.read(),
            Name=name
        )
    part['Content-Disposition'] = ('attachment; filename="%s"' % name)
    msg.attach(part)
    smtpObj = smtplib.SMTP('localhost')
    smtpObj.sendmail(gdsc.omero.ADMIN_EMAIL, send_to, msg.as_string())
    smtpObj.quit()
def email_results(userEmail, job_file, result):
    """
    E-mail the result to the user (thin wrapper around send_email).

    @param userEmail: The e-mail address
    @param job_file : The job file
    @param result : The result status
    """
    return send_email(userEmail, job_file, result)
def addState(state, size):
    """
    Increment the per-state file count and running byte total.

    @param state: The state
    @param size: The byte size of the file
    """
    global state_count, state_size
    state_count[state] = state_count.get(state, 0) + 1
    state_size[state] = state_size.get(state, 0) + size
def get_key(j, key):
    """
    Return j[key] when present, otherwise the empty string.

    @param j: The dictionary object
    @param key: The key to look up
    """
    return j.get(key, '')
def get_key_number(j, key):
    """
    Return j[key] when present, otherwise zero.

    @param j: The dictionary object
    @param key: The key to look up
    """
    return j.get(key, 0)
def get_info(rel_path):
    """
    Get the file information from the Arkivum REST API.

    @param rel_path: The path to the file on the Arkivum server
    @return: the parsed JSON response as a dict, or {} on failure
    """
    # Do not verify the SSL certificate
    # NOTE: the original called urllib.quote, which only exists in
    # Python 2; under the python3 shebang it raised AttributeError.
    r = requests.get('https://'+
                     gdsc.omero.ARKIVUM_SERVER+
                     '/api/2/files/fileInfo/'+urllib.parse.quote(rel_path),
                     verify=False)
    # What to do here? Arkivum has a 10 minute delay
    # between copying a file and the ingest starting. So it may
    # not show in the API just yet.
    if r.status_code == 200:
        try:
            return r.json()
        except ValueError:
            # The 200 response body was not valid JSON; fall through to {}.
            pass
    else:
        error("REST API response code: "+str(r.status_code))
    return {}
def get_option(config, option, section = gdsc.omero.ARK_ARKIVUM_ARCHIVER):
    """
    Return the option's value from the given section, or None when the
    section/option is not present.

    @param config: The ConfigParser
    @param option: The option
    @param section: The section
    """
    return config.get(section, option) if config.has_option(section, option) else None
def process(path):
    """
    Archive a single file to Arkivum.

    Checksums the file (MD5 + Adler32), copies it onto the mounted
    Arkivum appliance, verifies size/MD5 via the Arkivum REST API and,
    once the replication state matches options.state, deletes the
    original and replaces it with a symlink to the archived copy.
    Progress is recorded in the file's .ark record.

    @param path: The file path
    @return: gdsc.omero.JOB_IGNORE, JOB_RUNNING or JOB_FINISHED
    """
    global options, state_count, state_size
    log("Processing file " + path)
    if os.path.islink(path):
        # BUGFIX: the original called an undefined warn() here, raising
        # NameError on the first symlink encountered.
        error("Skipping symlink: %s" % path)
        return gdsc.omero.JOB_IGNORE
    r = os.stat(path)
    if not S_ISREG(r.st_mode):
        raise Exception("File does not exist: %s" % path)
    # Record steps to the .ark file
    ark_file = gdsc.omero.get_ark_path(options.archive_log, path)
    if not os.path.isfile(ark_file):
        raise Exception("Missing archive record file: %s" % ark_file)
    log("  Archive record = " + ark_file)
    config = configparser.RawConfigParser()
    config.read(ark_file)
    if not config.has_section(gdsc.omero.ARK_ARKIVUM_ARCHIVER):
        config.add_section(gdsc.omero.ARK_ARKIVUM_ARCHIVER)
    archived = False
    try:
        # Create the path in the archive
        full_path = path
        drive, path = os.path.splitdrive(path)
        path, filename = os.path.split(path)
        # Check path is relative (so can be joined)
        index = 0
        while os.path.isabs(path[index:]):
            index = index + 1
        directory = os.path.join(options.arkivum_root,
                                 options.arkivum_path, path[index:])
        if not os.path.exists(directory):
            os.makedirs(directory)
        # Checksum the file & copy to the archive
        ark_path = os.path.join(directory, filename)
        log("  Archive path = " + ark_path)
        # Store the relative path to the file from the base Arkivum directory
        rel_path = os.path.join(options.arkivum_path, path[index:], filename)
        # Use the Arkivum default checksums; MD5 and Adler32
        md5Digest = get_option(config, 'md5')
        adler32Digest = get_option(config, 'adler32')
        size = get_option(config, 'size')
        if size:
            try:
                size = int(size)
            except (TypeError, ValueError):
                pass
        # Store when the file was copied
        file_copied = False
        try:
            timestamp = float(get_option(config, 'timestamp'))
        except (TypeError, ValueError):
            # Missing or unparsable timestamp in the .ark record
            timestamp = 0
        if not (os.path.exists(ark_path)):
            # Copy to Arkivum and checksum in a single streaming pass
            log("  Copying to Arkivum")
            md5Hasher = hashlib.md5()
            adler32sum = 1
            size = 0
            blocksize = 65536
            with open(full_path, 'rb') as f:
                with open(ark_path, 'wb') as f2:
                    buf = f.read(blocksize)
                    while len(buf) > 0:
                        size = size + len(buf)
                        f2.write(buf)
                        md5Hasher.update(buf)
                        adler32sum = adler32(buf, adler32sum)
                        buf = f.read(blocksize)
            md5Digest = md5Hasher.hexdigest()
            adler32Digest = str(adler32sum & 0xffffffff)
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'md5', md5Digest)
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'adler32',
                       adler32Digest)
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'size', str(size))
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'path', ark_path)
            r = os.stat(ark_path)
            timestamp = r.st_mtime
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'copied',
                       time.ctime(timestamp))
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'timestamp',
                       str(timestamp))
            file_copied = True
        elif not (size and md5Digest and adler32Digest):
            # This occurs when the path to Arkivum already exists.
            # (Possible if the first copy failed part way through.)
            # Compute the checksum on the original file so the script will
            # error later if Arkivum has a bad copy.
            log("  Computing checksums")
            md5Hasher = hashlib.md5()
            adler32sum = 1
            size = 0
            blocksize = 65536
            with open(full_path, 'rb') as f:
                buf = f.read(blocksize)
                while len(buf) > 0:
                    size = size + len(buf)
                    md5Hasher.update(buf)
                    adler32sum = adler32(buf, adler32sum)
                    buf = f.read(blocksize)
            md5Digest = md5Hasher.hexdigest()
            adler32Digest = str(adler32sum & 0xffffffff)
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'md5', md5Digest)
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'adler32',
                       adler32Digest)
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'size', str(size))
            # BUGFIX: the original wrote 'path' to ARK_FILE_ARCHIVER here
            # (a section this script never creates, so config.set could
            # raise NoSectionError); the parallel copy branch above uses
            # ARK_ARKIVUM_ARCHIVER.
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'path', ark_path)
        # Report the checksums
        log("  MD5 = " + md5Digest)
        log("  Adler32 = " + adler32Digest)
        log("  Size = %d" % size)
        # Checksum the archive copy
        log("  Verifying transfer ...")
        # Connect to the Arkivum server and get the file information
        info = get_info(rel_path)
        # Arkivum has a 10 minute ingest delay which means that the API
        # may not have a response directly after a file copy. In this case
        # it is fine to just return Running. Re-running this later should find
        # the file.
        if (len(info) == 0):
            msg = "No file information available from Arkivum"
            if (file_copied or
                time.time() - timestamp < 600):
                # Initial copy / less than 10 minutes
                error(msg)
                addState('unknown', size)
                return gdsc.omero.JOB_RUNNING
            else:
                # Arkivum should have responded
                raise Exception(msg)
        ingestState = get_key(info, 'ingestState')
        log("  Ingest state = " + ingestState)
        if ingestState != 'FINAL':
            # Wait until Arkivum has processed the file
            msg = "Waiting for ingest to complete"
            if (file_copied or
                time.time() - timestamp < 6000):
                # Initial copy / less than 100 minutes
                log("  " + msg)
            else:
                # Arkivum should have ingested by now so show an error
                error(msg)
            addState('initial', size)
            return gdsc.omero.JOB_RUNNING
        size2 = get_key(info, 'size')
        # Compare size
        if (size != size2):
            raise Exception("Archived file has different size: %d != %d" %
                            (size, size2))
        log("  Size OK")
        # Compare checksums
        md5Digest2 = get_key(info, 'md5')
        # Note:
        # The adler32 value is used by Arkivum but not available via the API.
        # For now we will just store it but not check it.
        if (md5Digest != md5Digest2):
            raise Exception("Archived file has different checksum")
        log("  MD5 OK")
        # Get the archive state
        state = get_key(info, 'replicationState')
        log("  Arkivum replication state = " + state)
        # TODO? - log when the file changes state, e.g. red > amber > green
        config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'State', state)
        # summarise the amount of data in each replication state
        addState(state, size)
        if state == options.state:
            # Delete the original if the archiving is complete
            os.remove(full_path)
            status = "Archived"
            config.set(gdsc.omero.ARK_ARKIVUM_ARCHIVER, 'Archived',
                       time.ctime())
            archived = True
            # Create a symlink to the Arkivum location allowing access
            # (albeit at a reduced speed if the file is not on the appliance)
            # This is only available on unix
            try:
                os.symlink(ark_path, full_path)
                log("  Created link from original path to Arkivum")
            except (OSError, NotImplementedError):
                pass
        else:
            status = "Pending"
        log("  Status = " + status)
    finally:
        # Record the stage we reached
        with open(ark_file, 'w') as f:
            config.write(f)
    if archived:
        return gdsc.omero.JOB_FINISHED
    return gdsc.omero.JOB_RUNNING
def process_job(job_file):
    """
    Process the archive job file: run process() on every file that is
    still in the RUNNING state (once per script run), record resulting
    statuses, and move the job file to the finished/error folder when
    no files remain running.

    @param job_file: The job file path
    """
    global options, file_status, paths
    log("Processing job " + job_file)
    # Open the job file
    job = configparser.RawConfigParser()
    # Preserve the case of option names (file paths)
    job.optionxform = lambda option: option
    job.read(job_file)
    # Clear previous job errors
    if (job.has_option(gdsc.omero.JOB_INFO, 'error')):
        job.remove_option(gdsc.omero.JOB_INFO, 'error')
    # Count the number of files to process
    size = 0
    for (path, status) in job.items(gdsc.omero.JOB_FILES):
        if path in file_status:
            # This has already been done
            continue
        if status == gdsc.omero.JOB_RUNNING:
            size = size + 1
    if size:
        job.set(gdsc.omero.JOB_INFO, 'status', gdsc.omero.JOB_RUNNING)
    # Process the files
    log("Processing %d file%s" % (size, '' if size == 1 else 's'))
    error_flag = False
    running = 0
    for (path, status) in job.items(gdsc.omero.JOB_FILES):
        new_status = file_status.get(path)
        if new_status:
            # To prevent double processing of files, update the status
            # if this is not the first time we see this file.
            #
            # Note: this appears to be poor management of the status as it is
            # replicated through all job files which must be kept in sync.
            # However the status can be determined in this script in the
            # process() method. This allows a job file to have its status set
            # to running for all files to allow restarting the job.
            # Also note that tagging of images for archiving has respected
            # the many-to-many image-to-file relationship and should prevent
            # an image that has been tagged as archived from being processed
            # again. This only occurs when the tag has been added again
            # for testing or when testing by manually
            # manipulating the job files.
            job.set(gdsc.omero.JOB_FILES, path, new_status)
            status = new_status
        # BUGFIX: the original tested 'status == JOB_RUNNING' in both the
        # if and the elif, so the elif (the branch that actually calls
        # process()) was unreachable and no file was ever archived. Files
        # already handled earlier in this run are only counted; files seen
        # for the first time are processed.
        if new_status is not None and status == gdsc.omero.JOB_RUNNING:
            # This is still running
            running = running + 1
        elif status == gdsc.omero.JOB_RUNNING:
            # This is the first time we process this 'running' file
            try:
                # The process method returns the status or throws an exception
                status = process(path)
                if status == gdsc.omero.JOB_FINISHED:
                    # This has been archived
                    # Build a list of paths that have been archived
                    paths.append(path)
                else:
                    # This is still running
                    running = running + 1
            except Exception as e:
                status = gdsc.omero.JOB_ERROR
                # Record the error in the job file
                job.set(gdsc.omero.JOB_INFO, 'error', str(e))
                error("An error occurred: %s" % e)
            # Record the status of this file the first time it is processed
            file_status[path] = status
            # Record the status change in the job file
            if status != gdsc.omero.JOB_RUNNING:
                job.set(gdsc.omero.JOB_FILES, path, status)
        if status == gdsc.omero.JOB_ERROR:
            error_flag = True
            break
    # If finished or error then move the job file
    dir = ''
    email_address = ''
    if error_flag:
        dir = os.path.join(options.archive_job, gdsc.omero.JOB_ERROR)
        # If an error then only email the admin
    elif running == 0:
        dir = os.path.join(options.archive_job, gdsc.omero.JOB_FINISHED)
        # Only email the user when finished
        email_address = get_option(job, 'email', gdsc.omero.JOB_INFO)
    if dir:
        # This is complete
        status = os.path.basename(dir)
        job.set(gdsc.omero.JOB_INFO, 'complete', time.strftime("%c"))
        job.set(gdsc.omero.JOB_INFO, 'status', status)
    # Save changes to the job file
    with open(job_file, 'w') as f:
        job.write(f)
    if dir:
        # This is complete. E-mail the job file to the user/admin
        email_results(email_address, job_file, status)
        # Move to the processed folder
        log("Moving %s to %s" % (job_file, dir))
        shutil.move(job_file, dir)
def check_dir(path, carp=True):
    """
    Check that the path is an existing directory.

    @param path: The path
    @param carp: Raise an exception when the path is missing; otherwise
                 just report it via error()
    """
    if os.path.isdir(path):
        return
    message = "Path is not a directory: %s" % path
    if carp:
        raise Exception(message)
    error(message)
def banner(title):
    """
    Log a three-line banner ('-=' bar, title, bar) sized to the title.

    @param title the banner title
    """
    width = len(title)
    # int(width / 2) in the original == width // 2 for non-negative widths.
    bar = '-=' * (width // 2)
    if len(bar) < width:
        bar += '-'
    log(bar)
    log(title)
    log(bar)
# Gather our code in a main() function
def main():
    """Parse options, acquire the PID lock, process every running job
    file, update the to-archive/archived registers and print a summary
    of the Arkivum replication states."""
    parser = init_options()
    # Shared with process()/process_job()/addState()
    global options, state_count, state_size, file_status, paths
    state_count = {}
    state_size = {}
    file_status = {}
    paths = []
    (options, args) = parser.parse_args()
    try:
        # Single-instance lock: refuse to run when another copy holds it
        pid_file = gdsc.omero.PIDFile(
            os.path.join(options.archive_job,
                         os.path.basename(__file__) + '.pid'))
    except Exception as e:
        die("Cannot start process: %s" % e)
    banner("Archive Files to Arkivum")
    try:
        check_dir(options.archive_log)
        check_dir(options.arkivum_root)
        check_dir(os.path.join(options.arkivum_root, options.arkivum_path))
        check_dir(options.archive_job)
        check_dir(os.path.join(options.archive_job, gdsc.omero.JOB_RUNNING))
        check_dir(os.path.join(options.archive_job, gdsc.omero.JOB_FINISHED))
        check_dir(os.path.join(options.archive_job, gdsc.omero.JOB_ERROR))
        # Get the running job files
        job_dir = os.path.join(options.archive_job, gdsc.omero.JOB_RUNNING)
        _, _, filenames = next(os.walk(job_dir), (None, None, []))
        n = len(filenames)
        log("Processing %d job%s" % (n, gdsc.omero.pleural(n)))
        for path in filenames:
            process_job(os.path.join(job_dir, path))
        # Open the registers
        register = gdsc.omero.Register(options.to_archive, False)
        archived = gdsc.omero.Register(options.archived)
        # Add all running files to the to_archive register.
        # Note: If the script errors part way through the jobs then this
        # will be incomplete. The register is only used for reporting so
        # this is not a blocker.
        # TODO - create a script that can create the to_archive register from
        # the currently running job files
        running = []
        for (k, v) in file_status.items():
            if v == gdsc.omero.JOB_RUNNING:
                running.append(k)
        register.save(running)
        # Add archived files to the archived register
        size = len(paths)
        if size:
            log("Archived %d file%s" % (size, '' if size == 1 else 's'))
            archived.add_list(paths)
        # Summarise the amount of data in each replication state
        banner("Replication State Summary")
        for key in state_count:
            bytes = state_size[key]
            log("State %s : %d file%s : %d byte%s (%s)" % (key,
                state_count[key], gdsc.omero.pleural(state_count[key]),
                bytes, gdsc.omero.pleural(bytes), gdsc.omero.convert(bytes)))
    except Exception as e:
        fatal("An error occurred: %s" % e)
    # Release the single-instance lock
    pid_file.delete()
# Standard boilerplate to call the main() function to begin
# the program.
if __name__ == '__main__':
    main()
| 33.672245 | 80 | 0.588121 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9,348 | 0.387193 |
4730cdda8c75e103b2413e71694e53ee48400e4d | 3,980 | py | Python | projects/Yolov3/convert.py | shellhue/detectron2 | a027f2fe0dc21eedd201727515c4e963cd007ec0 | [
"Apache-2.0"
] | 3 | 2019-12-18T09:04:21.000Z | 2020-04-21T08:31:26.000Z | projects/Yolov3/convert.py | shellhue/detectron2 | a027f2fe0dc21eedd201727515c4e963cd007ec0 | [
"Apache-2.0"
] | 4 | 2021-06-08T20:51:59.000Z | 2022-03-12T00:12:46.000Z | projects/Yolov3/convert.py | shellhue/detectron2 | a027f2fe0dc21eedd201727515c4e963cd007ec0 | [
"Apache-2.0"
] | 1 | 2020-03-14T05:39:43.000Z | 2020-03-14T05:39:43.000Z | import torch.nn as nn
import numpy as np
import torch
import os
from detectron2.config import get_cfg
from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, launch
from detectron2.evaluation import COCOEvaluator, verify_results
from yolov3 import add_yolov3_config
def load_darknet_weights(weights, modules):
    """Load parameters from a Darknet ``.weights`` file into PyTorch modules.

    The Darknet format is a small header (3 x int32 version, 1 x int64 count
    of images seen during training) followed by one flat float32 array.  For
    every ``nn.Conv2d`` without a bias, the next entry in ``modules`` is
    assumed to be its paired ``nn.BatchNorm2d`` whose (bias, weight,
    running_mean, running_var) vectors are stored *before* the conv kernel,
    mirroring Darknet's serialization order.

    Args:
        weights: path to the Darknet ``.weights`` file.
        modules: flat, ordered list of ``nn.Conv2d``/``nn.BatchNorm2d``
            modules matching the Darknet layer order.
    """
    with open(weights, 'rb') as f:
        # (int32) version info: major, minor, revision
        version = np.fromfile(f, dtype=np.int32, count=3)
        # (int64) number of images seen during training
        seen = np.fromfile(f, dtype=np.int64, count=1)
        # The rest is one flat float32 parameter array.  Use a new name
        # instead of rebinding the ``weights`` path parameter.
        params = np.fromfile(f, dtype=np.float32)
        print(version, seen)
        print(params.shape)
    ptr = 0  # read cursor into the flat parameter array
    paired_modules = []
    param_count = 0  # expected number of floats, for eyeball sanity checking
    for i, module in enumerate(modules):
        if isinstance(module, nn.Conv2d):
            if module.bias is not None:
                # Stand-alone conv: bias + kernel weights are stored.
                paired_modules.append([module])
                param_count += module.weight.numel()
                param_count += module.bias.numel()
            else:
                # Bias-free conv: the following module is its BatchNorm2d,
                # contributing 4 vectors of length out_channels.
                paired_modules.append([module, modules[i + 1]])
                param_count += module.weight.numel()
                param_count += modules[i + 1].bias.numel() * 4
    print("param_count:", param_count)
    for conv_bn_modules in paired_modules:
        conv = conv_bn_modules[0]
        bn = conv_bn_modules[1] if len(conv_bn_modules) == 2 else None
        out_channel, in_channel, kernel_h, kernel_w = conv.weight.size()
        if bn:
            assert bn.bias.size()[0] == out_channel, "conv and bn is not paired"
            # Darknet stores BN parameters as: bias, weight, mean, variance.
            bn_b = torch.from_numpy(params[ptr:ptr + out_channel]).view_as(bn.bias)
            bn.bias.data.copy_(bn_b)
            ptr += out_channel
            bn_w = torch.from_numpy(params[ptr:ptr + out_channel]).view_as(bn.weight)
            bn.weight.data.copy_(bn_w)
            ptr += out_channel
            bn_rm = torch.from_numpy(params[ptr:ptr + out_channel]).view_as(bn.running_mean)
            bn.running_mean.data.copy_(bn_rm)
            ptr += out_channel
            bn_rv = torch.from_numpy(params[ptr:ptr + out_channel]).view_as(bn.running_var)
            bn.running_var.data.copy_(bn_rv)
            ptr += out_channel
        else:
            # A stand-alone conv stores its bias before the kernel weights.
            conv_b = torch.from_numpy(params[ptr:ptr + out_channel]).view_as(conv.bias)
            conv.bias.data.copy_(conv_b)
            ptr += out_channel
        # Kernel weights follow in both layouts.
        num_w = conv.weight.numel()
        conv_w = torch.from_numpy(params[ptr:ptr + num_w]).view_as(conv.weight)
        conv.weight.data.copy_(conv_w)
        ptr += num_w
    print("parsed:", ptr)
    print("succeed.")
def setup(args):
    """Build a frozen detectron2 config from the parsed CLI arguments."""
    config = get_cfg()
    add_yolov3_config(config)
    # Apply the YAML config file first, then any key=value overrides given
    # on the command line.
    config.merge_from_file(args.config_file)
    config.merge_from_list(args.opts)
    config.freeze()
    default_setup(config, args)
    return config
def main(args):
    """Convert Darknet-format weights into a serialized PyTorch checkpoint."""
    config = setup(args)
    model = DefaultTrainer.build_model(config)
    conv_bn_modules = model.get_conv_bn_modules()
    # Echo each conv weight shape so the Darknet layer order can be verified.
    for module in conv_bn_modules:
        print(module.weight.size())
    load_darknet_weights(args.initial_weights, conv_bn_modules)
    save_path = os.path.join(args.output_dir, "yolov3.pth")
    torch.save(model.state_dict(), save_path)
    print("model save to", save_path)
# Script entry point: extend detectron2's default CLI with converter-specific
# flags, then run ``main`` through the (possibly distributed) launcher.
if __name__ == "__main__":
    parser = default_argument_parser()
    parser.add_argument("--initial_weights", metavar="FILE", help="path to initial weights file")
    parser.add_argument("--output_dir", help="dir to save weights file")
    args = parser.parse_args()
    print("Command Line Args:", args)
    # ``launch`` handles single- and multi-GPU execution uniformly.
    launch(
        main,
        args.num_gpus,
        num_machines=args.num_machines,
        machine_rank=args.machine_rank,
        dist_url=args.dist_url,
        args=(args,),
    )
| 35.221239 | 97 | 0.626131 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 465 | 0.116834 |
4732b7772eda8e5da2f3077d9fda0eed21160738 | 6,102 | py | Python | examples/al_logistic.py | kirthevasank/mps | 2ba818c361e467841f6bbe0ef47a1e833ef315d3 | [
"MIT"
] | 3 | 2019-08-11T22:50:51.000Z | 2021-08-03T06:39:04.000Z | examples/al_logistic.py | kirthevasank/mps | 2ba818c361e467841f6bbe0ef47a1e833ef315d3 | [
"MIT"
] | null | null | null | examples/al_logistic.py | kirthevasank/mps | 2ba818c361e467841f6bbe0ef47a1e833ef315d3 | [
"MIT"
] | 3 | 2019-08-11T22:53:00.000Z | 2021-04-15T13:16:43.000Z | """
An active learning example on a 1D parametric problem.
-- kandasamy@cs.cmu.edu
To execute this example, you will need to install the Edward probabilistic
programming language (http://edwardlib.org).
See http://edwardlib.org/iclr2017 for instructions.
"""
# pylint: disable=invalid-name
# pylint: disable=no-name-in-module
# pylint: disable=abstract-method
from argparse import Namespace
import numpy as np
from scipy.optimize import minimize
# Local
from mps.exd.domains import EuclideanDomain
from mps.exd.goal_oriented_exd import GoalOrientedExperimentDesigner #guided_exd_args
from mps.exd.experiment_caller import EuclideanFunctionCaller
from mps.prob.disc_prob_examples import BayesianLogisticWithGaussianNoise
from mps.policies import mps
from mps.policies import random
def get_problem_params(options=None, reporter=None):
  """ Builds the ground-truth parameters, the Bayesian model and the
  experiment caller for the 1D logistic active-learning problem. """
  true_a, true_b, true_c = 2.1, 7, 6
  noise_var = 0.01
  true_theta = np.array([true_a, true_b, true_c, noise_var])
  x_domain = EuclideanDomain([[0, 10]])
  # Gaussian priors on (a, b, c); the noise variance eta2 is held fixed.
  prior_info = {
    'a': Namespace(distro='normal_1d', vi_distro='normal_1d', mu=2.0, sigma=0.2),
    'b': Namespace(distro='normal_1d', vi_distro='normal_1d', mu=6.0, sigma=2.0),
    'c': Namespace(distro='normal_1d', vi_distro='normal_1d', mu=5.0, sigma=2.0),
    'eta2': Namespace(distro='const_1d', vi_distro='const_1d', const=noise_var),
  }
  model = BayesianLogisticWithGaussianNoise(x_domain, None, prior_info,
                                            options=options, reporter=reporter)
  def experiment_eval_func(x):
    """ Draws one noisy observation at x from the true model. """
    return model.sample_y_giv_x_t(1, x, true_theta)[0]
  experiment_caller = EuclideanFunctionCaller(experiment_eval_func, x_domain,
                                              'gauss_logistic')
  return true_theta, model, experiment_caller
def compute_lwn_least_squares_est(X, Y, min_successes=20):
  """ Least-squares fit of the logistic-with-noise model via multi-start
  L-BFGS-B.

  Repeatedly minimizes the sum of squared errors of the model
  a / (1 + exp(b * (x - c))) starting from random initial points until at
  least ``min_successes`` runs converge (capped at 10x that many attempts),
  then keeps the best optimum found.

  Args:
    X: array of design points; flattened via ravel() before use.
    Y: 1-d array of observed responses.
    min_successes: minimum number of successful optimizer runs; must be > 0.
  Returns:
    Length-4 numpy array [a, b, c, eta2], where eta2 is the mean squared
    residual of the best fit (a plug-in noise-variance estimate).
  Raises:
    ValueError: if min_successes is not positive (previously this path died
    with an unrelated NameError).
  """
  if min_successes <= 0:
    raise ValueError('min_successes must be a positive integer')
  # Model prediction ---------------------------------------------
  def _model_pred(params):
    """ Predicted Y over all of X for logistic parameters (a, b, c). """
    a, b, c = params
    return a / (1 + np.exp(b * (X.ravel() - c)))
  # Sum of squared errors ----------------------------------------
  def _sse(params):
    """ Computes the sum-of-squares error for params. """
    y_preds = _model_pred(params)
    return np.sum(np.square(Y - y_preds))
  # Choose the init point ----------------------------------------
  def _choose_init_pt():
    """ Chooses a random initial point in [0,4] x [0,10] x [0,1]. """
    return np.random.random((3,)) * np.array([4, 10, 1])
  locs = []  # locations of points we reach from optimization
  vals = []  # function values at locs
  iters_ran = 0
  max_iters = 10 * min_successes
  while len(locs) < min_successes:
    x0 = _choose_init_pt()
    optim_result = minimize(_sse, x0, method='L-BFGS-B')
    if optim_result.success:
      locs.append(optim_result.x)
      vals.append(optim_result.fun)
    iters_ran += 1
    if iters_ran == max_iters:
      break
  if vals:
    best_abc = locs[np.argmin(vals)]
  else:
    # No run converged; fall back to the final attempt's end point.
    best_abc = optim_result.x
  best_sse = _sse(best_abc)
  eta2 = best_sse / float(len(X))
  return np.array(list(best_abc) + [eta2])
class GaussLogisticProblem(GoalOrientedExperimentDesigner):
  """ Goal-oriented experiment-design problem for the Gaussian-logistic
  active learning example. """

  def __init__(self, experiment_caller, worker_manager, model, true_theta,
               options=None, reporter=None, *args, **kwargs):
    """ Constructor.  true_theta is the ground-truth parameter vector used
    by the oracle penalty. """
    self.true_theta = true_theta
    # Bug fix: the super() call previously named an undefined
    # ``SurfactantProblem`` class (a copy/paste leftover) and raised
    # NameError at instantiation time.
    super(GaussLogisticProblem, self).__init__(
        experiment_caller, worker_manager, model, self._penalty,
        self._true_penalty, *args, options=options, reporter=reporter,
        **kwargs)

  def _penalty(self, theta, X, Y):
    """ Penalty: squared relative error between theta and the least-squares
    estimate obtained from the data collected so far. """
    raw_X = self.experiment_caller.get_raw_domain_coords(X)
    est_theta = compute_lwn_least_squares_est(raw_X, Y)
    # Normalize by the true parameters (+0.001 guards against zeros).
    norm_err = (theta - est_theta) / (self.true_theta + 0.001)
    return np.linalg.norm(norm_err)**2

  def _true_penalty(self, X, Y):
    """ The penalty evaluated at the ground-truth parameters. """
    return self._penalty(self.true_theta, X, Y)
class GaussLogisticActiveLearnerMPS(GaussLogisticProblem, mps.MPSExperimentDesigner):
  """ Active Learning on the Bayesian Logistic Model with Posterior Sampling.

  Combines the problem definition with the MPS policy via cooperative
  multiple inheritance; no extra behaviour is needed here.
  """
  pass
class GaussLogisticActiveLearnerMO(GaussLogisticProblem,
                                   mps.MyopicOracleExperimentDesigner):
  """ Active Learning on the Bayesian Logistic Model with the Oracle policy.

  Mixes the problem definition with the myopic-oracle policy; the bodies of
  both parents supply all behaviour.
  """
  pass
class GaussLogisticActiveLearnerRandom(GaussLogisticProblem,
                                       random.EuclideanRandomExperimentDesigner):
  """ Random Designer on GaussLogistic problem.

  Baseline: picks experiments uniformly at random over the Euclidean domain.
  """
  pass
def main():
  """ Main function.

  Runs the random, oracle and MPS designers on the same problem with a
  shared budget so their goal-progress can be compared.
  """
  # NOTE(review): ``SyntheticWorkerManager`` and ``load_options_for_policy``
  # are not imported in this module's visible imports -- presumably they come
  # from mps.exd / an options utility; confirm before running.
  budget = 40
  true_theta, model, experiment_caller = get_problem_params()
  worker_manager = SyntheticWorkerManager(1)
  # Random sampling
  print('\nRandom designer:')
  worker_manager.reset()
  rand_options = load_options_for_policy('rand')
  rand_designer = GaussLogisticActiveLearnerRandom(experiment_caller, worker_manager,
    model, true_theta, options=rand_options)
  rand_designer.run_experiments(budget)
  # Myopic oracle policy (comment previously mislabelled this "Random sampling")
  print('\nOracle designer:')
  worker_manager.reset()
  mo_options = load_options_for_policy('mo')
  mo_designer = GaussLogisticActiveLearnerMO(experiment_caller, worker_manager,
    model, true_theta, options=mo_options)
  mo_designer.run_experiments(budget)
  # Posterior sampling
  print('\nMPS designer:')
  worker_manager.reset()
  mps_options = load_options_for_policy('mps')
  mps_designer = GaussLogisticActiveLearnerMPS(experiment_caller, worker_manager,
    model, true_theta, options=mps_options)
  mps_designer.run_experiments(budget)
# Script entry point.
if __name__ == '__main__':
  main()
| 36.759036 | 86 | 0.687807 | 1,511 | 0.247624 | 0 | 0 | 0 | 0 | 0 | 0 | 1,774 | 0.290724 |
5b25cfe6974828d106200cab739a7140d4fcfc56 | 306 | py | Python | settings/settings.py | guilhermegch/telegram-twitter-bot | 463517569e60d4caab5bf4fd36deac9b7f1bc2cd | [
"MIT"
] | 2 | 2020-12-24T15:10:43.000Z | 2021-02-13T05:23:10.000Z | settings/settings.py | guilhermegch/telegram-twitter-bot | 463517569e60d4caab5bf4fd36deac9b7f1bc2cd | [
"MIT"
] | 1 | 2021-02-26T16:30:36.000Z | 2021-02-26T16:30:36.000Z | settings/settings.py | guilhermegch/telegram-twitter-bot | 463517569e60d4caab5bf4fd36deac9b7f1bc2cd | [
"MIT"
] | null | null | null | import os
from dotenv import load_dotenv
# Load variables from a local .env file into the process environment so the
# os.getenv() lookups below can pick them up.
load_dotenv()
# Telegram bot credentials and target chat.
# NOTE: os.getenv returns None for any unset variable -- no validation here.
TELEGRAM_TOKEN = os.getenv("TELEGRAM_TOKEN")
CHAT_ID = os.getenv("CHAT_ID")
# Twitter API credentials.
API_KEY = os.getenv("API_KEY")
API_SECRET_KEY = os.getenv("API_SECRET_KEY")
ACCESS_TOKEN = os.getenv("ACCESS_TOKEN")
ACCESS_TOKEN_SECRET = os.getenv("ACCESS_TOKEN_SECRET")
| 23.538462 | 54 | 0.784314 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 85 | 0.277778 |
5b262fe1cfdd19c11d28767c67a03711bd313651 | 12,917 | py | Python | rsbeams/rsstats/kinematic.py | radiasoft/rsbeams | 1a35c9222ee727a580d626b1791b2c29c1215346 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2017-11-03T18:06:27.000Z | 2022-03-02T22:03:14.000Z | rsbeams/rsstats/kinematic.py | radiasoft/rsbeams | 1a35c9222ee727a580d626b1791b2c29c1215346 | [
"ECL-2.0",
"Apache-2.0"
] | 29 | 2018-01-14T16:48:28.000Z | 2022-03-25T21:17:36.000Z | rsbeams/rsstats/kinematic.py | radiasoft/rsbeams | 1a35c9222ee727a580d626b1791b2c29c1215346 | [
"ECL-2.0",
"Apache-2.0"
] | 23 | 2018-01-16T19:41:20.000Z | 2020-09-18T11:17:16.000Z | #!/usr/bin/env python
import sys
import argparse as arg
import numpy as np
from scipy.constants import e, c, m_e, physical_constants
from future.utils import iteritems
# TODO: Priority #1: Need to go back to the principle that user puts in whatever units they desire
# TODO: cont. All calculations internall are cgs and then conversion is done to put in the form user requests
# TODO: Priority #2: Comprehensive set of tests for combinations of input and output units
# ERROR: kinematic -v 299788543.885 --unit SI: This returns numbers that are in eV but the units claim they're SI
# TODO: Add functionality to the Converter class for stand-alone use in scripts/notebooks
# TODO: Add output conversion to SI units
# TODO: Dynamically adjust printout to use sensible unit scale? (i.e. not just eV but MeV, GeV, etc if more readable)
# TODO: Add check when E is given to make sure E>mc**2
# Electron rest-mass energy in eV, and the eV-per-kg factor derived from it
# (used to translate masses between kg and eV/c**2).
m_e_ev = physical_constants['electron mass energy equivalent in MeV'][0] * 1e6
m_e_kg = m_e
ev_per_kg = m_e_ev / m_e_kg
# Command-line interface: exactly one kinematic quantity must be supplied;
# the remaining flags select particle mass and input/output unit systems.
parser = arg.ArgumentParser(description="Calculate relativistic, kinematic quantities based on input of one initial"
                                        "quantity (see options below). The input may be made in appropriate "
                                        "electron volt based unit quantity or SI unit (if set). The particle type"
                                        "defaults to electron though any desired mass may set in eV/c**2 or kg "
                                        "(if appropriate flag set). ")
# NOTE(review): ``input`` shadows the builtin of the same name at module scope.
input = parser.add_mutually_exclusive_group(required=True)
input.add_argument("-p", "--momentum", dest="momentum", type=float, help="Input momentum value. Default unit: eV/c")
input.add_argument("-v", "--velocity", dest="velocity", type=float, help="Input velocity value. Default unit: m/s")
# NOTE(review): the -E help string says "velocity" -- copy/paste typo to fix.
input.add_argument("-E", "--energy", dest="energy", type=float, help="Input velocity value. Default unit eV")
input.add_argument("-KE", "--kenergy", dest="kenergy", type=float, help="Input kinetic energy value. Default unit eV")
input.add_argument("-bg", "--betagamma", dest="betagamma", type=float, help="Input beta*gamma value. Default unit none")
input.add_argument("-b", "--beta", dest="beta", type=float, help="Input beta value. Default unit none")
input.add_argument("-g", "--gamma", dest="gamma", type=float, help="Input gamma value. Default unit none")
parser.add_argument("-m", "--mass", dest="mass", type=float, help="Optional: Value of particle mass to use.")
parser.add_argument("--mass_unit", dest="mass_unit", choices=["SI", "eV"], default="eV",
                    help="Set mass units. Options are:\n"
                         "'SI' for standard SI units on all inputs.\n"
                         "'eV' for the respective electron volt based unit "
                         "for all inputs.\nDefaults to 'eV'.\n")
parser.add_argument("--unit", dest="input_unit", choices=["SI", "eV"], default="eV",
                    help="Set input units. Options are:\n"
                         "'SI' for standard SI units on all inputs.\n"
                         "'eV' for the respective electron volt based unit "
                         "for all inputs.\nDefaults to 'eV'.\n")
parser.add_argument("--output_unit", dest="output_unit", choices=["SI", "eV"], default="eV",
                    help="Set output unit for mass and kinetmatics. Options are:\n"
                         "'SI' for standard SI units on all outputs.\n"
                         "'eV' for the respective electron volt based unit "
                         "for all outputs.\nDefaults to 'eV'.\n")
class Converter:
    """
    Converter works by taking the input kinematic quantity and then always
    calculating beta and gamma; all other kinematic quantities (velocity,
    momentum, beta*gamma, energy, kinetic energy) are then derived from that
    pair.
    """

    def __init__(self, momentum=None, velocity=None, energy=None, kenergy=None,
                 betagamma=None, beta=None, gamma=None, mass=None,
                 mass_unit='eV', input_unit='eV', output_unit='eV',
                 start_parser=None):
        """
        Class that takes in a single kinematic quantity and particle mass and
        returns a list of other kinematic quantities. Options for input and
        the output are:
        - Velocity
        - Beta
        - Gamma
        - Momentum
        - Normalized momentum (beta * gamma)
        - Energy
        - Kinetic Energy
        Args:
            start_parser: optional argparse parser; when given, every setting
                above is taken from its parsed command-line options instead.
        Raises:
            AssertionError: if a unit flag is not 'SI'/'eV', or if not exactly
                one kinematic quantity is provided.
        """
        if start_parser:
            args = start_parser.parse_args()
            self.args = {key: getattr(args, key) for key in vars(args)}
        else:
            # Snapshot the keyword arguments (py3 replacement for the old
            # future.utils.iteritems() copy; the resulting dict is identical).
            self.args = dict(locals())
        for key in ['mass_unit', 'input_unit', 'output_unit']:
            assert self.args[key] == 'eV' or self.args[key] == 'SI', "Units must be given as SI or eV"
        # Exactly one kinematic quantity may be declared.
        declaration = 0
        for key in ['momentum', 'velocity', 'energy', 'kenergy', 'betagamma', 'beta', 'gamma']:
            if self.args[key] is not None:
                declaration += 1
        assert declaration == 1, "One and only one initial kinematic quantity must be provided"
        # Convert to eV for internal use
        if self.args['input_unit'] == 'SI':
            self._unit_convert(input_unit='SI', input_dict=self.args)
        # Method to call based on the kinematic quantity the user inputs
        self.startup = {"momentum": self.start_momentum,
                        "velocity": self.start_velocity,
                        "energy": self.start_energy,
                        "kenergy": self.start_kenergy,
                        "betagamma": self.start_betagamma,
                        "beta": self.start_beta,
                        "gamma": self.start_gamma}
        # Store quantities needed for output or the method to calculate them;
        # callables are resolved to values in __call__.
        self.outputs = {"momentum": self.calculate_momentum,
                        "velocity": self.calculate_velocity,
                        "energy": self.calculate_energy,
                        "kenergy": self.calculate_kenergy,
                        "betagamma": self.calcuate_betagamma,
                        "beta": None,
                        "gamma": None,
                        "p_unit": "eV/c" * (self.args['output_unit'] == 'eV') + "kg * m/s" * (self.args['output_unit'] == 'SI'),
                        "e_unit": "eV" * (self.args['output_unit'] == 'eV') + "J" * (self.args['output_unit'] == 'SI'),
                        "mass": None,
                        "mass_unit": "eV/c^2" * (self.args['output_unit'] == 'eV') + "kg" * (self.args['output_unit'] == 'SI'),
                        "input": None,
                        "input_unit": None,
                        "input_type": None}
        # Match mass unit to input units. Print mass in units the user used
        # for input though.
        if self.args['mass_unit'] == "eV":
            if self.args['mass']:
                self.mass = self.args['mass'] * (1 * (self.args['input_unit'] == 'eV') + 1 / ev_per_kg * (self.args['input_unit'] == 'SI'))
                self.outputs["mass"] = self.args['mass']
            else:
                self.mass = m_e_ev
                self.outputs["mass"] = m_e_ev
        elif self.args['mass_unit'] == "SI":
            # Bug fix: this branch previously read self.outputs['mass'] and
            # self.outputs['input_unit'] (both still None at this point), so a
            # user-supplied SI mass was silently ignored.  Mirror the eV
            # branch and read from self.args instead.
            if self.args['mass']:
                # NOTE(review): converting kg -> eV/c**2 means multiplying by
                # ev_per_kg; the original 1/ev_per_kg factor looked inverted.
                # The overall unit handling is still flagged as inconsistent
                # by the TODO/ERROR notes at the top of this file.
                self.mass = self.args['mass'] * (1 * (self.args['input_unit'] == 'SI') + ev_per_kg * (self.args['input_unit'] == 'eV'))
                self.outputs["mass"] = self.args['mass']
            else:
                self.mass = m_e_ev
                self.outputs["mass"] = m_e_ev / ev_per_kg

    def __call__(self, *args, silent=False, **kwargs):
        """
        Computes every derived quantity, optionally prints a summary, and
        returns the outputs dict.
        Args:
            silent: when True, suppress the formatted printout.
        """
        # Set beta and gamma based on the input value
        for key, value in self.args.items():
            if value is not None and key in self.startup:
                self.outputs["input"] = value
                self.outputs["input_type"] = key
                self.outputs["beta"], self.outputs["gamma"] = self.startup[key](value)
        # Find correct unit names for the printout
        if self.args["input_unit"] == "eV":
            input_unit = "eV" * (self.outputs["input_type"] == 'energy') + \
                         "eV" * (self.outputs["input_type"] == 'kenergy') + \
                         "eV/c" * (self.outputs["input_type"] == 'momentum') + \
                         "m/s" * (self.outputs["input_type"] == 'velocity') + ""
        else:
            input_unit = "J" * (self.outputs["input_type"] == 'energy') + \
                         "J" * (self.outputs["input_type"] == 'kenergy') + \
                         "kg*m/s" * (self.outputs["input_type"] == 'momentum') + \
                         "" * (self.outputs["input_type"] == 'beta') + \
                         "m/s" * (self.outputs["input_type"] == 'velocity') + ""
        self.outputs["input_unit"] = input_unit
        # Set all derived kinematic quantities (replace callables in place).
        for key, value in self.outputs.items():
            if callable(value):
                self.outputs[key] = value(**self.outputs)
        print_string = """
        Based on an input {input_type} of {input} {input_unit}
        For a particle with mass: {mass} {mass_unit}
        velocity:       {velocity} m/s
        beta:           {beta}
        gamma:          {gamma}
        momentum:       {momentum} {p_unit}
        beta * gamma:   {betagamma}
        energy:         {energy} {e_unit}
        kinetic energy: {kenergy} {e_unit}
        """
        if self.args['output_unit'] == 'SI':
            self._unit_convert(input_unit='eV', input_dict=self.outputs)
        if not silent:
            print(print_string.format(**self.outputs))
        return self.outputs

    # All start methods used to convert the input kinematic quantity to
    # (beta, gamma).
    @staticmethod
    def start_velocity(velocity):
        """
        Calculate beta and gamma based on velocity.
        Args:
            velocity: Particle velocity in m/s.
        Returns:
            (beta, gamma)
        """
        beta = velocity / c
        return beta, 1. / np.sqrt(1 - beta**2)

    @staticmethod
    def start_gamma(gamma):
        """
        Calculate beta and gamma based on gamma.
        Args:
            gamma: Relativistic gamma, unitless.
        Returns:
            (beta, gamma)
        """
        return np.sqrt(1. - 1 / gamma**2), gamma

    @staticmethod
    def start_beta(beta):
        """
        Calculate beta and gamma based on beta.
        Args:
            beta: Relativistic beta, unitless.
        Returns:
            (beta, gamma)
        """
        return beta, 1 / np.sqrt(1. - beta**2)

    @staticmethod
    def start_betagamma(betagamma):
        """
        Calculate beta and gamma based on beta * gamma.
        Args:
            betagamma: Normalized momentum beta * gamma.
        Returns:
            (beta, gamma)
        """
        beta = betagamma / np.sqrt(1 + betagamma**2)
        return beta, 1. / np.sqrt(1 - beta**2)

    def start_momentum(self, momentum):
        """ Calculate beta and gamma from momentum (in the mass's units per c). """
        normalized_momentum = momentum / self.mass
        beta = normalized_momentum / np.sqrt(1 + normalized_momentum**2)
        gamma = 1 / np.sqrt(1 - beta**2)
        return beta, gamma

    def start_energy(self, energy):
        """ Calculate beta and gamma from total energy. """
        gamma = energy / self.mass
        beta = np.sqrt(1. - 1 / gamma**2)
        return beta, gamma

    def start_kenergy(self, kenergy):
        """ Calculate beta and gamma from kinetic energy. """
        gamma = kenergy / self.mass + 1.
        return np.sqrt(1. - 1 / gamma**2), gamma

    # All calculate methods are called to get necessary kinematic quantities
    def calculate_momentum(self, beta, gamma, **kwargs):
        """ Momentum beta * gamma * m (in the mass's units per c). """
        return beta * gamma * self.mass

    def calculate_energy(self, gamma, **kwargs):
        """ Total energy gamma * m. """
        return gamma * self.mass

    def calculate_kenergy(self, gamma, **kwargs):
        """ Kinetic energy m * (gamma - 1). """
        return self.mass * (gamma - 1)

    def calcuate_betagamma(self, beta, gamma, **kwargs):
        """ Normalized momentum beta * gamma.  (The misspelled name is kept
        for interface compatibility.) """
        return beta * gamma

    def calculate_velocity(self, beta, **kwargs):
        """ Velocity beta * c in m/s. """
        return beta * c

    def _unit_convert(self, input_unit, input_dict):
        """
        Converts momentum/energy/kinetic energy/mass entries of input_dict
        between SI and eV-based units, in place.
        Args:
            input_unit: units the values currently carry; 'SI' converts
                SI -> eV, anything else ('eV') converts eV -> SI.
            input_dict: dict holding the values (self.args or self.outputs).
        """
        conversion_factors = {
            'momentum': 1 / e * c,
            'energy': 1 / e,
            'kenergy': 1 / e,
            'mass': 1 / e * c**2
        }
        for key in conversion_factors:
            # Bug fix: a leftover debug print compared the *builtin* ``input``
            # against 'SI' (always False) and spewed internals; removed.
            if input_dict[key]:
                input_dict[key] = input_dict[key] * (conversion_factors[key])**(2 * (input_unit == 'SI') - 1)
# CLI entry point: with no arguments, show the help text first; then build a
# Converter from the parsed options and print all derived quantities.
# NOTE(review): Converter is constructed even in the no-argument case, so
# argparse will exit with a "required" error right after printing the help.
if __name__ == "__main__":
    if len(sys.argv) == 1:
        parser.print_help()
    run_converter = Converter(start_parser=parser)
    run_converter()
| 42.630363 | 148 | 0.565379 | 9,191 | 0.711543 | 0 | 0 | 1,180 | 0.091352 | 0 | 0 | 5,739 | 0.444298 |
5b26ea4b942bbea2547de23d670546a45074eb42 | 292 | py | Python | Python Basics/Week 4/assess_ac4_1_1_8.py | sasathornt/Python-3-Programming-Specialization | 34a204662112f8977bdd2831687a020d775d6f39 | [
"MIT"
] | 1 | 2020-04-17T14:22:15.000Z | 2020-04-17T14:22:15.000Z | Python Basics/Week 4/assess_ac4_1_1_8.py | sasathornt/Python-3-Programming-Specialization | 34a204662112f8977bdd2831687a020d775d6f39 | [
"MIT"
] | null | null | null | Python Basics/Week 4/assess_ac4_1_1_8.py | sasathornt/Python-3-Programming-Specialization | 34a204662112f8977bdd2831687a020d775d6f39 | [
"MIT"
] | null | null | null |
##Write code to switch the order of the winners list so that it is now Z to A. Assign this list to the variable z_winners.
winners = ['Alice Munro', 'Alvin E. Roth', 'Kazuo Ishiguro', 'Malala Yousafzai', 'Rainer Weiss', 'Youyou Tu']
# Bug fix: ``z_winners = winners`` only aliased the list, so the subsequent
# reverse() also mutated ``winners``.  Build a reversed *copy* instead.
z_winners = winners[::-1]
print(z_winners) | 36.5 | 122 | 0.732877 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 209 | 0.715753 |
5b272e873c9de7f31e79382b02ad299738f19a80 | 1,254 | py | Python | shreddit/app.py | llalon/Shreddit | 4b083121b8909aca39715390c6c61b5b9f11664d | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | shreddit/app.py | llalon/Shreddit | 4b083121b8909aca39715390c6c61b5b9f11664d | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | shreddit/app.py | llalon/Shreddit | 4b083121b8909aca39715390c6c61b5b9f11664d | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | """This module contains script entrypoints for shreddit.
"""
import argparse
import yaml
import logging
import os
import pkg_resources
from shreddit import default_config
from shreddit.shredder import Shredder
CONFIG_FILE_PATH = "/app/config/shreddit.yml"
def generate_empty_config(path: str):
    """Write the bundled shreddit.yml example template to *path*."""
    print("Writing shreddit.yml file...")
    # The example template ships inside the package as binary resource data.
    with open(path, "wb") as out_file:
        out_file.write(pkg_resources.resource_string("shreddit", "shreddit.yml.example"))
def main():
    """Entry point: overlay the user's config on the defaults, then shred.

    When no config file exists yet, write the example template and return so
    the user can fill it in first.
    """
    if not os.path.isfile(CONFIG_FILE_PATH):
        print("No shreddit configuration file was found or provided.")
        generate_empty_config(CONFIG_FILE_PATH)
        return
    with open(CONFIG_FILE_PATH) as config_file:
        user_config = yaml.safe_load(config_file)
        # Only keys already present in default_config are copied over -- the
        # defaults deliberately act as a whitelist of recognized options, so a
        # partially-written config is fine for reporting purposes.
        for option in default_config:
            if option in user_config:
                default_config[option] = user_config[option]
    shredder = Shredder(default_config)
    shredder.shred()
# Script entry point; Ctrl-C exits cleanly instead of dumping a traceback.
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        print("Shreddit aborted by user")
        quit()
| 28.5 | 115 | 0.69697 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 404 | 0.322169 |
5b28828610a5ad5e04acee2728602c3a07c4c935 | 476 | py | Python | helpers/bnp.py | wim-vdw/bank-statement-converter | c79f56a756778d249b38b871e02cf68d95b2943e | [
"MIT"
] | null | null | null | helpers/bnp.py | wim-vdw/bank-statement-converter | c79f56a756778d249b38b871e02cf68d95b2943e | [
"MIT"
] | null | null | null | helpers/bnp.py | wim-vdw/bank-statement-converter | c79f56a756778d249b38b871e02cf68d95b2943e | [
"MIT"
] | null | null | null | from PyPDF3 import PdfFileReader
class BNPConverter:
    """Extracts raw text lines from a BNP bank-statement PDF."""

    def __init__(self, input_file, start_number=1):
        # Path (or file object) of the PDF, plus the first statement number.
        self.input_file = input_file
        self.start_number = start_number

    def get_text_lines(self):
        """Return every text line of the PDF, page by page, in order."""
        reader = PdfFileReader(self.input_file)
        lines = []
        for page_index in range(reader.getNumPages()):
            # extractText() yields one newline-joined string per page.
            lines.extend(reader.getPage(page_index).extractText().split('\n'))
        return lines
| 29.75 | 60 | 0.661765 | 440 | 0.92437 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.008403 |
5b2959e590709993e1842b7c13e5e7666b0d0eed | 426 | py | Python | c15/p280_test1511.py | pkingpeng/-python- | f7c3269b6c13edf31449a3f21c3314c27095c984 | [
"Apache-2.0"
] | null | null | null | c15/p280_test1511.py | pkingpeng/-python- | f7c3269b6c13edf31449a3f21c3314c27095c984 | [
"Apache-2.0"
] | null | null | null | c15/p280_test1511.py | pkingpeng/-python- | f7c3269b6c13edf31449a3f21c3314c27095c984 | [
"Apache-2.0"
] | null | null | null | import time
print(time.time())  # show the current epoch timestamp before the benchmark
def calcProd(n=100000):
    """Return the product 1 * 2 * ... * (n - 1), i.e. factorial of (n - 1).

    The upper bound is now a parameter (the default keeps the original
    100000 behaviour) so the function can also be benchmarked at other
    sizes.  For n <= 1 the empty product 1 is returned.
    """
    product = 1
    for i in range(1, n):
        product *= i
    return product
# Time one full calcProd() call -- a deliberately slow computation used to
# demonstrate wall-clock benchmarking with time.time().
startTime = time.time()
result = calcProd()
endTime = time.time()
# %s formatting stringifies both the digit count and the elapsed seconds.
print('The result is %s digits long, took %s seconds to calculate.' % (len(str(result)), (endTime - startTime)))
# Sample output from a previous run, kept as an unassigned string literal.
"""
1586680463.075951
The result is 456569 digits long, took 2.2562198638916016 seconds to calculate.
"""
| 18.521739 | 112 | 0.673709 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 166 | 0.389671 |
5b2a2ee9eb529547d321f575db078e312ed392cd | 3,592 | py | Python | blog/user.py | mahdisj/flask_project | 464f892c4f67b0404c8d44517325f9a43496b22b | [
"BSD-3-Clause"
] | 1 | 2022-01-06T07:38:03.000Z | 2022-01-06T07:38:03.000Z | blog/user.py | mowbish/Blog_Project | 464f892c4f67b0404c8d44517325f9a43496b22b | [
"BSD-3-Clause"
] | 2 | 2021-08-18T18:56:35.000Z | 2021-08-19T13:42:18.000Z | blog/user.py | mowbish/Blog_Project | 464f892c4f67b0404c8d44517325f9a43496b22b | [
"BSD-3-Clause"
] | 4 | 2021-08-12T13:28:13.000Z | 2021-09-01T14:15:10.000Z | from flask import Blueprint
from flask import g
from flask import flash
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from werkzeug.exceptions import abort
from werkzeug.utils import secure_filename
from bson import ObjectId
from blog.auth import login_required
from blog.db import get_db
# Blueprint grouping the user-facing views (profile page + post CRUD).
bp = Blueprint("user", __name__)
@bp.route("/profile")
@login_required
def profile():
    """Render the logged-in user's profile page."""
    return render_template("user/profile.html")
@bp.route("/posts-list/")
@login_required
def posts_list():
    """List every post authored by the logged-in user."""
    db = get_db()
    cursor = db.post.find({"author_id": ObjectId(g.user["_id"])})
    # Materialize the Mongo cursor with list() instead of the original
    # pass-through comprehension.
    posts = list(cursor)
    return render_template("user/post_list.html", posts=posts)
@bp.route("/create", methods=("GET", "POST"))
@login_required
def create_post():
    """Create a new post for the logged-in user.

    On POST: registers any new tags, saves an optional image upload,
    validates title/content and inserts the post document (plus a text index
    for searching).  On GET, or after a validation error, the creation form
    is rendered.  (Leftover debug prints were removed.)
    """
    if request.method == "POST":
        title = request.form.get('title')
        content = request.form.get('content')
        category = request.form.get('category')
        tags = request.form.getlist('tags')
        db = get_db()
        # Known tag names as a set for O(1) membership tests.
        known_tags = {doc['name'] for doc in db.tag.find()}
        for tag in tags:
            if tag not in known_tags:
                db.tag.insert_one({"name": tag})
                # Bug fix: remember the freshly inserted tag so a duplicate
                # entry within this same request is not inserted twice.
                known_tags.add(tag)
        activition = request.form.get('activition')
        f = request.files.get('image')
        if f:
            fname = secure_filename(f.filename)
            f.save('blog/static/media/' + fname)
            image = fname
        else:
            image = None
        error = None
        if not title:
            error = "پست شما نیاز به یک اسم دارد."
        if not content:
            error = "شما مطلبی ننوشته اید!!"
        if error is not None:
            flash(error)
        else:
            like, dislike = [], []
            db = get_db()
            db.post.insert_one({"title": title, "content": content, "category": category, "tag": tags, "image": image,
                                "activition": activition,
                                "author_username": g.user["username"], "author_id": g.user["_id"],
                                "author_image": g.user["image"], "like": like, "dislike": dislike})
            # Text index enabling full-text search over posts.
            db.post.create_index([('title', 'text'), ('content', 'text'), ('author_username', 'text')])
            return redirect(url_for("blog.index"))
    return render_template("user/create_post.html")
@bp.route("/edit/<string:post_id>", methods=("GET", "POST"))
@login_required
def edit_post(post_id):
    """Edit an existing post.

    GET renders the edit form pre-filled with the post; POST registers any
    new tags and updates the post's title/content/tags/activition fields.
    """
    db = get_db()
    post = db.post.find_one({"_id": ObjectId(post_id)})
    if post is None:
        # Robustness: unknown ids previously raised IndexError (HTTP 500).
        abort(404)
    if request.method == "POST":
        title = request.form.get('title')
        content = request.form.get('content')
        tags = request.form.getlist('tags')
        # Bug fix: the original compared tag *names* against full tag
        # documents (always unequal, so every tag looked new) and then
        # inserted the "new" tags into the ``post`` collection instead of
        # ``tag``, polluting the posts collection.
        known_tags = {doc['name'] for doc in db.tag.find()}
        for tag in tags:
            if tag not in known_tags:
                db.tag.insert_one({"name": tag})
                known_tags.add(tag)
        activition = request.form.get('activition')
        db.post.update({
            '_id': post['_id']
        }, {
            '$set': {
                "title": title, "content": content, "tag": tags, "activition": activition,
            }
        }, upsert=False, multi=False)
        return redirect(url_for("blog.index"))
    else:
        return render_template("user/edit_post.html", post=post)
| 31.234783 | 119 | 0.548719 | 0 | 0 | 0 | 0 | 3,202 | 0.882094 | 0 | 0 | 670 | 0.184573 |
5b2d3bb33aaf00064a4a81aefce63447edda3498 | 100 | py | Python | Python-Hackerrank/Eye and Identity.py | nihalkhan2810/Data-Structures-Algorithms | 3224fba18dcdbb1c6cc3d7f7a16df4ed0e0a321d | [
"MIT"
] | 10 | 2020-05-02T14:42:15.000Z | 2021-01-26T16:51:47.000Z | Python-Hackerrank/Eye and Identity.py | nihalkhan2810/Data-Structures-Algorithms | 3224fba18dcdbb1c6cc3d7f7a16df4ed0e0a321d | [
"MIT"
] | null | null | null | Python-Hackerrank/Eye and Identity.py | nihalkhan2810/Data-Structures-Algorithms | 3224fba18dcdbb1c6cc3d7f7a16df4ed0e0a321d | [
"MIT"
] | 13 | 2020-03-05T13:31:11.000Z | 2021-01-29T08:14:26.000Z | import numpy
# Read "N M" from stdin and print numpy.eye(N, M); the replace() calls pad
# the 1s and 0s with a leading space to match HackerRank's expected legacy
# numpy text formatting.
print(str(numpy.eye(*map(int,input().split()))).replace('1',' 1').replace('0',' 0'))
| 20 | 84 | 0.61 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.14 |
5b2ea698bf453d5761c99858fd3248bda11d4d8d | 160 | py | Python | main.py | NChechulin/telegram-renderer-bot | 3b66a550022bce1604a073a2c922d6c9597f508f | [
"MIT"
] | 1 | 2020-09-20T04:36:11.000Z | 2020-09-20T04:36:11.000Z | main.py | NChechulin/telegram-renderer-bot | 3b66a550022bce1604a073a2c922d6c9597f508f | [
"MIT"
] | 2 | 2020-04-04T21:02:49.000Z | 2020-04-06T11:16:17.000Z | main.py | NChechulin/telegram-renderer-bot | 3b66a550022bce1604a073a2c922d6c9597f508f | [
"MIT"
] | null | null | null | """Main file which starts the bot and sets all of the parameters"""
from bot import Bot
# Script entry point: construct the bot and hand control to its event loop.
if __name__ == '__main__':
    # The token is read from a local file so it never lands in the repository.
    bot = Bot('token.txt')
    bot.start()
| 20 | 67 | 0.6625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 88 | 0.55 |
5b2fe9458d92b9142657735d809e3eae76a1581c | 14,009 | py | Python | tpRigToolkit/tools/controlrig/widgets/controlviewer.py | tpRigToolkit/tpRigToolkit-tools-controlrig | 46e15377d64418bcc4bc8944c3f9537603fa0e9d | [
"MIT"
] | null | null | null | tpRigToolkit/tools/controlrig/widgets/controlviewer.py | tpRigToolkit/tpRigToolkit-tools-controlrig | 46e15377d64418bcc4bc8944c3f9537603fa0e9d | [
"MIT"
] | null | null | null | tpRigToolkit/tools/controlrig/widgets/controlviewer.py | tpRigToolkit/tpRigToolkit-tools-controlrig | 46e15377d64418bcc4bc8944c3f9537603fa0e9d | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module that contains control viewer widget for tpRigToolkit.tools.controlrig
"""
from __future__ import print_function, division, absolute_import
import math
from copy import copy
from Qt.QtCore import QPoint, QPointF, QRect, QRectF, QLineF
from Qt.QtWidgets import QWidget, QCheckBox, QLabel
from Qt.QtGui import QColor, QLinearGradient, QPainter, QPen, QBrush
class ControlViewer(QWidget, object):
    """
    Custom 3D viewer to display control shapes.

    The widget draws the loaded control shapes over a vertical gradient
    background, together with a ground grid, the world axes and an optional
    reference "joint" sphere.  Instead of a real 3D camera it uses a cheap
    3D -> 2D projection (:meth:`_convert_3D_to_2D`): dragging with the left
    mouse button orbits the view and the mouse wheel zooms.
    """

    class ShapePool(object):
        """
        Stack for the displayed shapes, with a direct conversion to QLines.

        Assigning a list of 2D points to an index (``pool[i] = points``)
        stores the QLineF segments joining consecutive points, ready to be
        fed to ``QPainter.drawLines()``.
        """

        def __init__(self):
            super(ControlViewer.ShapePool, self).__init__()
            self._shapes = list()

        def get_shapes(self):
            return self._shapes

        shapes = property(get_shapes)

        def __setitem__(self, key, points):
            # Turn the point list into the line segments joining them
            pts = list()
            if len(points):
                start = points[0]
                for pt in points[1:]:
                    pts.append(QLineF(start, pt))
                    start = pt
            self._shapes[key] = pts

        def __iter__(self):
            for shape in self._shapes:
                yield shape

        def flush(self, length):
            # Empty the pool, then recreate `length` empty slots
            for idx in range(len(self._shapes) - 1, -1, -1):
                self._shapes.pop(idx)
            for i in range(length):
                self._shapes.append([])

    def __init__(self, parent=None):
        super(ControlViewer, self).__init__(parent=parent)
        self.setObjectName('controlViewer')
        self._shapes = list()                 # shapes currently loaded in the viewer
        self._baked_lines = self.ShapePool()  # 2D line segments ready to be painted
        self._control = None
        self._mouse_pos = QPoint(0, 0)
        self._mouse_press = False
        self._rotate_order = 'XYZ'
        self._scale = 30          # zoom factor (pixels per world unit)
        self._ref = 0.5           # reference joint radius, in world units
        self._rotation = 235      # horizontal orbit angle, in degrees
        self._height_rotate = 60  # vertical orbit angle, clamped to [60, 120]
        self._draw_ref = False
        self._draw_axis = True
        self._gradient_color_1 = QColor(44, 46, 48)
        self._gradient_color_2 = QColor(124, 143, 163)
        self._control_color = QColor(240, 245, 255)
        self._control_line_width = 1.5
        gradient = QLinearGradient(QRectF(self.rect()).bottomLeft(), QRectF(self.rect()).topLeft())
        gradient.setColorAt(0, self._gradient_color_1)
        gradient.setColorAt(1, self._gradient_color_2)
        self._background = QBrush(gradient)
        # Axis pens: X = red, Y = green, Z = blue
        self._axis_pen = [QPen(QColor(255, 0, 0), 0.5), QPen(QColor(0, 255, 0), 0.5), QPen(QColor(125, 125, 255), 0.5)]
        self._sub_grid_pen = QPen(QColor(74, 74, 75), 0.25)
        self._control_pen = QPen(self._control_color, self._control_line_width)
        self._ref_display = QCheckBox('joint', self)
        sheet = '''
        QCheckBox {color:white; background-color: transparent;}
        QCheckBox:unchecked {color:rgb(212, 201, 206);}
        QCheckBox::indicator {width: 10px;height: 10px;background:rgb(34, 38, 45);
        border:1px solid rgb(134, 138, 145);border-radius:5px;}
        QCheckBox::indicator:hover {background:rgb(34, 108, 185);border:1px solid white; border-radius:5px;}
        QCheckBox::indicator:checked{background:rgb(74, 168, 235);border:2px solid rgb(34, 108, 185); padding:-1px;}
        QCheckBox::indicator:checked:hover{background:rgb(74, 168, 235);border:1px solid white; padding:0px;}
        '''
        self._ref_display.setStyleSheet(sheet)
        self._ref_display.setGeometry(5, -2, self._ref_display.width(), self._ref_display.height())
        self._ref_display.stateChanged.connect(self._on_toggle_ref)
        self._axis_display = QCheckBox('axis', self)
        self._axis_display.setStyleSheet(sheet)
        self._axis_display.setGeometry(5, 13, self._axis_display.width(), self._axis_display.height())
        self._axis_display.stateChanged.connect(self._on_toggle_axis)
        self._axis_display.setChecked(True)
        self._infos = QLabel('', self)
        self._infos.setStyleSheet('QLabel {color:rgb(134, 138, 145); background-color: transparent;}')

    @property
    def control(self):
        return self._control

    @control.setter
    def control(self, ctrl):
        self._control = ctrl

    @property
    def ref(self):
        return self._ref

    @ref.setter
    def ref(self, value):
        self._ref = value

    @property
    def shapes(self):
        return self._shapes

    @shapes.setter
    def shapes(self, shapes_list):
        self._shapes = shapes_list

    @property
    def control_color(self):
        return self._control_color

    @control_color.setter
    def control_color(self, color):
        # Rebuild the pen so the next paint uses the new color
        self._control_color = color
        self._control_pen = QPen(self._control_color, self._control_line_width)

    def mousePressEvent(self, event):
        # Left button starts orbiting the view
        if event.button() == 1:
            self._mouse_press = True
            self._mouse_pos = event.pos()

    def mouseMoveEvent(self, event):
        if self._mouse_press:
            delta = self._mouse_pos - event.pos()
            self._rotation -= delta.x()
            # Clamp the vertical orbit so the camera cannot flip over
            self._height_rotate = min(max(self._height_rotate + delta.y(), 60), 120)
            self._mouse_pos = event.pos()
            self.update_coords()

    def mouseReleaseEvent(self, event):
        self._mouse_press = False

    def wheelEvent(self, event):
        # Zoom, never going below a scale of 10
        self._scale = max(self._scale + event.delta() / 40, 10)
        self.update_coords()

    def resizeEvent(self, event):
        # Rebuild the background gradient to span the new widget size, reusing
        # the stored gradient colors (previously duplicated here as literals,
        # which would silently desynchronize if the attributes were changed).
        gradient = QLinearGradient(QRectF(self.rect()).bottomLeft(), QRectF(self.rect()).topLeft())
        gradient.setColorAt(0, self._gradient_color_1)
        gradient.setColorAt(1, self._gradient_color_2)
        self._background = QBrush(gradient)

    def paintEvent(self, event):
        painter = QPainter()
        painter.begin(self)
        painter.setRenderHint(painter.Antialiasing)
        painter.setBrush(self._background)
        painter.drawRoundedRect(QRect(0, 0, self.size().width(), self.size().height()), 4, 4)
        self._draw_grid(painter=painter)
        painter.setPen(self._control_pen)
        for shape in self._baked_lines:
            painter.drawLines(shape)
        painter.end()

    def load(self, shapes):
        """
        Updates the viewport with new shapes, cleaning old stuff and smoothing the shape using
        Catmull Rom method
        :param shapes: list of shape objects to display
        """
        self._shapes = shapes
        for i, shape in enumerate(self._shapes):
            # Only smooth once per shape; degree 1 (linear) shapes stay as-is
            if shape.degree != 1 and not shape.smooth:
                shape.cvs = self._smooth(copy(shape.cvs), shape.degree, shape.periodic)
                shape.smooth = True
            shape.apply_transform()
        self.update_coords()

    def update_coords(self):
        """
        Refresh 2D lines viewport array
        """
        self._baked_lines.flush(len(self._shapes))
        for i, shape in enumerate(self._shapes):
            self.set_shape_coords(shape, i)
        self.update()

    def set_shape_coords(self, shape, shape_index):
        """
        This converts shape's transformed CVs into 2D points for the viewer's drawing
        :param shape: shape object providing ``transformed_cvs``
        :param shape_index: int, slot of the shape in the baked-lines pool
        """
        points_2d = list()
        for pt in shape.transformed_cvs:
            points_2d.append(self._convert_3D_to_2D(*pt))
        # If the shape is closed, we add the first points to close the loop
        if shape.periodic and shape.degree == 1:
            points_2d.append(self._convert_3D_to_2D(*shape.transformed_cvs[0]))
        self._baked_lines[shape_index] = points_2d
    # endregion

    # region Private Functions
    def _convert_3D_to_2D(self, x, y, z):
        """
        Cheap conversion from 3D coordinates to 2D coordinates depending on the view
        :param x: float, x coordinate
        :param y: float, y coordinate
        :param z: float, z coordinate
        :return: QPointF, 2D coordinates
        """
        _x = x * math.cos(math.radians(self._rotation))
        _x -= z * math.cos(math.radians(-self._rotation + 90))
        _x *= self._scale
        # We do a 2D projection (key to fake the vertical camera rotation)
        _y = (x * math.sin(math.radians(self._rotation)) - y + z * math.sin(
            math.radians(-self._rotation + 90))) * self._scale
        # Round the vertical rotate to achieve a uniform scaling on the shape when the camera turns up and down
        _y *= math.cos(math.radians(self._height_rotate))
        # Push compensation from the Y attribute of the point
        _y += y * self._scale * (math.tan(math.radians(90 - self._height_rotate)) + math.sin(
            math.radians(self._height_rotate)))
        _y *= -1
        # Center the point on the view
        _x += self.width() * 0.5
        _y += self.height() * 0.5
        return QPointF(_x, _y)

    def _smooth(self, cv, deg, periodic):
        """
        Smoothing the given coordinates (cv) using the Catmull Rom method
        # TODO: At this moment, we set the degree as the number of division of the Catmull Rom method, this is not
        # TODO: correct and we should change this to fit with each DCC method
        :param cv: list of control vertices
        :param deg: int, number of subdivisions inserted between control points
        :param periodic: bool, whether the curve is closed
        :return: list of smoothed ControlV points
        """
        from tpRigToolkit.tools.controlrig.core import controldata
        pts = []
        cv = cv[:-3]
        points_length = len(cv)
        # mapping the division's steps
        div_map = [j / float(deg) for j in range(deg)]
        for i in range(0, points_length + 1):
            if (i < 0 or (i - deg) > points_length) and periodic:
                continue
            if (i <= 0 or (i + deg) > points_length) and not periodic:
                continue
            # The four neighboring control points (wrapping around the ends)
            p0 = controldata.ControlV(cv[i - 1])
            p1 = controldata.ControlV(cv[i if i < points_length else (i - points_length)])
            p2 = controldata.ControlV(cv[(i + 1) if (i + 1) < points_length else (i + 1 - points_length)])
            p3 = controldata.ControlV(cv[(i + 2) if (i + 2) < points_length else (i + 2 - points_length)])
            # CUBIC spline smoothing #
            # a = p3 - p2 - p0 + p1
            # b = p0 - p1 - a
            # c = p2 - p0
            # d = p1
            # for j in range(deg):
            #     t = j / float(deg)
            #     t2 = t**2
            #     pos = a*t*t2 + b*t2 + c*t + d
            #     pts.append(pos)
            # CATMULL ROM spline smoothing #
            a = .5 * (p1 * 2)
            b = .5 * (p2 - p0)
            c = .5 * (2 * p0 - 5 * p1 + 4 * p2 - p3)
            d = .5 * (-1 * p0 + 3 * p1 - 3 * p2 + p3)
            for j, t in enumerate(div_map):
                pos = a + (b * t) + (c * t * t) + (d * t * t * t)
                pts.append(pos)
        return pts

    def _draw_grid(self, painter):
        """
        Draw the grid of the viewport, displaying the main axis
        :param painter: QPainter, active painter from paintEvent
        :return:
        """
        from tpRigToolkit.tools.controlrig.core import controldata
        if self._draw_axis:
            # Highlight the main axis of the current rotate order
            parent_main_axis = self._rotate_order
            for x in range(3):
                self._axis_pen[x].setWidthF(0.5)
            self._axis_pen[controldata.axis_eq[parent_main_axis[0]]].setWidthF(1.5)
            painter.setPen(self._axis_pen[0])
            painter.drawLine(self._convert_3D_to_2D(0, 0, 0), self._convert_3D_to_2D(100, 0, 0))
            painter.setPen(self._sub_grid_pen)
            # Grid spacing adapts to the reference size so the grid stays readable
            step = self._ref if self._ref > 0.3 else (5 * self._ref if self._ref > 0.05 else 50 * self._ref)
            rows = int(10 * (1 / step) * 0.75)
            for i in range(-rows, rows):
                painter.drawLine(self._convert_3D_to_2D(-100, 0, i * step), self._convert_3D_to_2D(100, 0, i * step))
                painter.drawLine(self._convert_3D_to_2D(i * step, 0, -100), self._convert_3D_to_2D(i * step, 0, 100))
            painter.setPen(self._axis_pen[1])
            painter.drawLine(self._convert_3D_to_2D(0, 0, 0), self._convert_3D_to_2D(0, 100, 0))
            painter.setPen(self._axis_pen[2])
            painter.drawLine(self._convert_3D_to_2D(0, 0, 0), self._convert_3D_to_2D(0, 0, 100))
        if self._draw_ref:
            # Wireframe unit sphere used as the "joint" size reference
            painter.setPen(QPen(QColor(125, 165, 185), 0.8))
            sp = [[0.0, 0.0, 1.0], [-0.5, 0.0, 0.87], [-0.87, 0.0, 0.5], [-1.0, 0.0, 0.0], [-0.87, 0.0, -0.5],
                  [-0.5, 0.0, -0.87], [0.0, 0.0, -1.0], [0.5, 0.0, -0.87], [0.87, 0.0, -0.5], [1.0, 0.0, 0.0],
                  [0.87, 0.0, 0.5], [0.5, 0.0, 0.87], [0.0, 0.0, 1.0], [0.0, 0.7, 0.7], [0.0, 1.0, 0.0],
                  [0.0, 0.7, -0.7], [0.0, 0.0, -1.0], [0.0, -0.7, -0.7], [0.0, -1.0, 0.0], [-0.5, -0.87, 0.0],
                  [-0.87, -0.5, 0.0], [-1.0, 0.0, 0.0], [-0.87, 0.5, 0.0], [-0.5, 0.87, 0.0], [0.0, 1.0, 0.0],
                  [0.5, 0.87, 0.0], [0.87, 0.5, 0.0], [1.0, 0.0, 0.0], [0.87, -0.5, 0.0], [0.5, -0.87, 0.0],
                  [0.0, -1.0, 0.0], [0.0, -0.7, 0.7], [0.0, 0.0, 1.0]]
            for i, p in enumerate(sp[:-1]):
                s, e = controldata.ControlV(p), controldata.ControlV(sp[i + 1])
                s *= self._ref * 0.5
                e *= self._ref * 0.5
                painter.drawLine(self._convert_3D_to_2D(*s), self._convert_3D_to_2D(*e))
        if self._shapes:
            # Bottom-left info panel about the first shape / shape count
            height = 40
            info = 'degree%s : %i' % ('s' if self._shapes[0].degree > 1 else '', self._shapes[0].degree)
            info += '\nclosed : %s' % ('no', 'yes')[bool(self._shapes[0].periodic)]
            if len(self._shapes) > 1:
                info += '\nshapes : %i' % len(self._shapes)
                height += 20
            self._infos.setText(info)
            self._infos.setFixedHeight(height)
            self._infos.setGeometry(10, self.height() - height, self.width(), self._infos.height())

    def _on_toggle_ref(self, state):
        self._draw_ref = state
        self.repaint()

    def _on_toggle_axis(self, state):
        self._draw_axis = state
        self.repaint()
| 36.481771 | 119 | 0.572346 | 13,589 | 0.970019 | 86 | 0.006139 | 639 | 0.045614 | 0 | 0 | 2,792 | 0.1993 |
5b31e7140c554c44dbfaf956752e0ce5c615d9a0 | 3,647 | py | Python | sinfo/perifericos/views.py | webdesigncuba/Sinfo | 15998b43057b0c0f13083a3017f27740c64239bf | [
"MIT"
] | null | null | null | sinfo/perifericos/views.py | webdesigncuba/Sinfo | 15998b43057b0c0f13083a3017f27740c64239bf | [
"MIT"
] | null | null | null | sinfo/perifericos/views.py | webdesigncuba/Sinfo | 15998b43057b0c0f13083a3017f27740c64239bf | [
"MIT"
] | null | null | null | #
# Created on Sat Dec 25 2021
#
# The MIT License (MIT)
# Copyright (c) 2021 David Cordero Rosales
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial
# portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
# TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Django
from django.shortcuts import render
from django.shortcuts import render, HttpResponseRedirect
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
from django.shortcuts import reverse
from django.urls import reverse_lazy
from django.http import HttpResponse
from django.template.loader import get_template
from django.template import context
from django_renderpdf.views import PDFView
from django.contrib import messages
# Models
from .models import *
# Forms
from .forms import *
class ChasisListView(ListView):
    """Paginated listing of every ``Chasis`` record."""

    model = Chasis
    paginate_by = 10

    def get_context_data(self, *, object_list=None, **kwargs):
        # Add the page title on top of the standard list context.
        context = super().get_context_data(**kwargs)
        context.update(title='Listado de Chasis')
        return context
class ChasisCreateView(CreateView):
    """Creation form for ``Chasis`` records.

    ``post`` is overridden so a valid submission flashes a success message
    and an invalid one is re-rendered with the bound form and its errors.
    (Debug ``print`` calls and an unreachable statement after ``return``
    were removed from the original implementation.)
    """

    model = Chasis
    form_class = ChasisForm
    template_name = 'perifericos/chasis_form.html'
    success_url = reverse_lazy('chasislist')

    def post(self, request, *args, **kwargs):
        form = ChasisForm(request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Guardado exitoso')
            return HttpResponseRedirect(self.success_url)
        # Invalid form: re-render the page with the bound form so field
        # errors are shown to the user.
        self.object = None
        context = self.get_context_data(**kwargs)
        context['form'] = form
        return render(request, self.template_name, context)

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['title'] = 'Creacion de Chasis'
        return context
class ChasisUpdateView(UpdateView):
    """Edit form for an existing ``Chasis`` record.

    (A debug ``print(self.object)`` was removed from the original.)
    """

    model = Chasis
    form_class = ChasisForm
    template_name = 'perifericos/chasis_update.html'
    success_url = reverse_lazy('chasislist')

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        context['title'] = 'Edicion de Chasis'
        return context
class ChasisDeleteView(DeleteView):
    """Confirm-and-delete view for a ``Chasis`` record; redirects to the listing."""
    model = Chasis
    success_url = reverse_lazy('chasislist')
class ChasisPDF(PDFView):
    """Render every ``Chasis`` record into a PDF report."""

    template_name = 'report.html'

    def get_context_data(self, *args, **kwargs):
        """Pass some extra context to the template."""
        ctx = super().get_context_data(*args, **kwargs)
        ctx['chasis'] = Chasis.objects.all()
        return ctx
| 33.458716 | 122 | 0.718124 | 1,963 | 0.538251 | 0 | 0 | 0 | 0 | 0 | 0 | 1,483 | 0.406636 |
5b332c72b302cfe177178082cf6acc6352315dcc | 2,548 | py | Python | autogluon/utils/tabular/ml/models/tab_transformer/hyperparameters/parameters.py | joshr17/autogluon | 893cf20ad761751886a827e2d10710eb6df61291 | [
"Apache-2.0"
] | null | null | null | autogluon/utils/tabular/ml/models/tab_transformer/hyperparameters/parameters.py | joshr17/autogluon | 893cf20ad761751886a827e2d10710eb6df61291 | [
"Apache-2.0"
] | null | null | null | autogluon/utils/tabular/ml/models/tab_transformer/hyperparameters/parameters.py | joshr17/autogluon | 893cf20ad761751886a827e2d10710eb6df61291 | [
"Apache-2.0"
] | null | null | null | from ....constants import BINARY, MULTICLASS, REGRESSION
def get_fixed_params():
    """Return the fixed (non-searchable) TabTransformer hyperparameters.

    HPO is not implemented for TabTransformer yet, so every value here is
    treated as fixed; a future PR will have to split these into what is
    truly "fixed" and what should become searchable.
    """
    # Transformer architecture and optimizer settings.
    tab_kwargs = {
        'n_cont_embeddings': 0,
        'n_layers': 1,
        'n_heads': 8,
        'hidden_dim': 128,
        'norm_class_name': 'LayerNorm',
        'tab_readout': 'none',
        'column_embedding': True,
        'shared_embedding': False,
        # 'n_shared_embs': 8,  # 8, # careful
        'p_dropout': 0.1,
        'orig_emb_resid': False,
        'one_hot_embeddings': False,
        'drop_whole_embeddings': False,
        'max_emb_dim': 8,
        'lr': 1e-3,
        'weight_decay': 1e-6,
        'base_exp_decay': 0.95,
    }
    # Encoder class to use for each raw feature type.
    encoders = {
        'CATEGORICAL': 'CategoricalOrdinalEnc',
        'DATETIME': 'DatetimeOrdinalEnc',
        'LATLONG': 'LatLongQuantileOrdinalEnc',
        'SCALAR': 'ScalarQuantileOrdinalEnc',
        'TEXT': 'TextSummaryScalarEnc',
    }
    return {
        'batch_size': 512,
        'tab_kwargs': tab_kwargs,
        'encoders': encoders,
        'augmentation': {'mask_prob': 0.4, 'num_augs': 1},
        'pretext': 'BERT_pretext',
        'n_cont_features': 8,
        'fix_attention': False,
        'freq': 1,
        'pretrain_freq': 100,
        'feature_dim': 64,
        'epochs': 100,
        'pretrain_epochs': 200,
        'epochs_wo_improve': 10,
    }
def get_default_param(problem_type, nunique=None):
    """Build the default TabTransformer parameters for a given problem type.

    For multiclass problems ``nunique`` supplies the number of classes; for
    any unrecognized problem type no ``n_classes`` entry is added.
    """
    params = get_fixed_params()
    params['problem_type'] = problem_type
    # Output width per problem type; multiclass needs the observed class count.
    n_classes_by_type = {REGRESSION: 1, BINARY: 2, MULTICLASS: nunique}
    if problem_type in n_classes_by_type:
        params['n_classes'] = n_classes_by_type[problem_type]
    return params
| 44.701754 | 86 | 0.431319 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 901 | 0.353611 |
5b346d8eb912e92694627beff6d3cb24dfe08b7c | 327 | py | Python | Accounts/migrations/0014_remove_timetable_class_time.py | Anand911/E-LEARNING-SCLMAXO- | a16c317ae482c91f4f91c967ddc3e498a43ac7e9 | [
"MIT"
] | 1 | 2021-02-14T10:43:21.000Z | 2021-02-14T10:43:21.000Z | Accounts/migrations/0014_remove_timetable_class_time.py | Anand911/E-LEARNING-SCLMAXO- | a16c317ae482c91f4f91c967ddc3e498a43ac7e9 | [
"MIT"
] | 1 | 2021-01-12T07:22:08.000Z | 2021-01-13T19:07:02.000Z | Accounts/migrations/0014_remove_timetable_class_time.py | Anand911/E-LEARNING-SCLMAXO- | a16c317ae482c91f4f91c967ddc3e498a43ac7e9 | [
"MIT"
] | 6 | 2020-12-13T17:46:37.000Z | 2021-02-10T13:47:25.000Z | # Generated by Django 3.1.3 on 2020-12-18 12:59
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: removes the 'class_time' field from Timetable."""
    dependencies = [
        ('Accounts', '0013_timetable'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='timetable',
            name='class_time',
        ),
    ]
| 18.166667 | 47 | 0.590214 | 242 | 0.740061 | 0 | 0 | 0 | 0 | 0 | 0 | 96 | 0.293578 |
5b375610f2df94425cc6ff0fc566d0f540dd1a07 | 93 | py | Python | app/crawlzero/apps.py | rputh055/crawlzerotest | 6dfd919b2d59b5d3eec7ea9b2f5ff149428ae83f | [
"MIT"
] | null | null | null | app/crawlzero/apps.py | rputh055/crawlzerotest | 6dfd919b2d59b5d3eec7ea9b2f5ff149428ae83f | [
"MIT"
] | null | null | null | app/crawlzero/apps.py | rputh055/crawlzerotest | 6dfd919b2d59b5d3eec7ea9b2f5ff149428ae83f | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class crawlzeroConfig(AppConfig):
    """Django application configuration for the 'crawlzero' app."""
    name = 'crawlzero'
| 15.5 | 33 | 0.763441 | 56 | 0.602151 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 0.11828 |
5b3779a37a92deffcbfba50edaafbeef57f6578d | 1,773 | py | Python | python/code_challenges/stacks_and_queues/stacks_and_queues.py | kylehoac/data-structures-and-algorithms | 52326ffcf27b5cc27863a96db86ece585f3d5e33 | [
"MIT"
] | null | null | null | python/code_challenges/stacks_and_queues/stacks_and_queues.py | kylehoac/data-structures-and-algorithms | 52326ffcf27b5cc27863a96db86ece585f3d5e33 | [
"MIT"
] | 7 | 2021-04-15T23:51:52.000Z | 2021-04-26T17:18:16.000Z | python/code_challenges/stacks_and_queues/stacks_and_queues.py | kylehoac/data-structures-and-algorithms | 52326ffcf27b5cc27863a96db86ece585f3d5e33 | [
"MIT"
] | null | null | null | class Node:
def __init__(self, value, next_=None):
self.value = value
self.next = next_
class Stack:
def __init__(self, top=None):
self.top = top
def push(self, value):
self.top = Node(value, self.top)
def pop(self):
if self.top:
ret = self.top.value
self.top = self.top.next
return ret
return
def peek(self):
if self.top:
return self.top.value
return
def is_empty(self):
if not self.top:
return self.top is None
class Queue:
def __init__(self, front=None):
self.front = front
self.back = None
def enqueue(self, value=None):
if self.front is None:
self.front = self.back = None(value)
else:
self.back.next = None(value)
def dequeue(self):
if self.front is None:
return 'Queue is empty'
ret = self.front.value
self.front = self.front.next
return ret
class Pseudo_queue:
"""This class is a queue
"""
def __init__(self, front, tail):
self.front = front
self.tail = tail
def enqueue(self, value=None):
if (self.front.top == None) and (self.tail.top == None):
self.front.push(value)
return self.front.top.value
if (self.front.top == None) and self.tail.top:
while self.tail.top:
self.front.push(self.tail.pop())
self.tail.push(value)
return self.tail.top.value
self.tail.push(value)
return self.tail.top.value
def dequeue(self):
if (self.front.top == None) and (self.tail.top == None):
return 'this queue is empty buddy'
if (self.front.top == None) and self.tail.top:
while self.tail.top:
self.front.push(self.tail.pop())
self.front.pop()
return self.front.pop()
| 23.959459 | 60 | 0.600113 | 1,765 | 0.995488 | 0 | 0 | 0 | 0 | 0 | 0 | 73 | 0.041173 |
5b37b2fb348d45fd02d8f2acaf70e041233fe2f7 | 1,334 | py | Python | fastflix/encoders/copy/settings_panel.py | AwesomeGitHubRepos/FastFlix | 60adf2b68a13907ac17013cb621867b2b302c101 | [
"MIT"
] | 1 | 2021-06-14T04:35:50.000Z | 2021-06-14T04:35:50.000Z | fastflix/encoders/copy/settings_panel.py | AwesomeGitHubRepos/FastFlix | 60adf2b68a13907ac17013cb621867b2b302c101 | [
"MIT"
] | 1 | 2020-12-24T13:08:56.000Z | 2020-12-24T13:08:56.000Z | fastflix/encoders/copy/settings_panel.py | leonardyan/FastFlix | 01f19c2de74945a4c60db61711aea9d3fe01b0cc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import logging
from qtpy import QtWidgets
from fastflix.encoders.common.setting_panel import SettingPanel
from fastflix.language import t
from fastflix.models.encode import CopySettings
from fastflix.models.fastflix_app import FastFlixApp
logger = logging.getLogger("fastflix")
class Copy(SettingPanel):
    """Settings panel for stream copy: the source video track is passed
    through untouched, so only informational labels and the custom-options
    row are shown."""

    profile_name = "copy_settings"

    def __init__(self, parent, main, app: FastFlixApp):
        super().__init__(parent, main, app)
        self.main = main
        self.app = app

        info_top = QtWidgets.QLabel(t("This will just copy the video track as is."))
        info_bottom = QtWidgets.QLabel(t("No crop, scale, rotation,flip nor any other filters will be applied."))

        layout = QtWidgets.QGridLayout()
        layout.addWidget(info_top, 0, 0)
        layout.addWidget(info_bottom, 1, 0)
        layout.addWidget(QtWidgets.QWidget(), 2, 0, 10, 1)  # spacer pushing the custom row down
        layout.addLayout(self._add_custom(disable_both_passes=True), 11, 0, 1, 6)
        self.setLayout(layout)
        self.hide()

    def update_video_encoder_settings(self):
        """Write a fresh ``CopySettings`` (plus any custom ffmpeg extras)
        onto the current video's settings."""
        video_settings = self.app.fastflix.current_video.video_settings
        video_settings.video_encoder_settings = CopySettings()
        video_settings.video_encoder_settings.extra = self.ffmpeg_extras
        video_settings.video_encoder_settings.extra_both_passes = False
5b38ee5a6db9bcb14bd052bda891d575c9747173 | 1,611 | py | Python | thermo_images/teste.py | brochinejr/thermo_images | be9f6d6fa26f58301d102a7cdcc8ca32a69a5289 | [
"RSA-MD"
] | null | null | null | thermo_images/teste.py | brochinejr/thermo_images | be9f6d6fa26f58301d102a7cdcc8ca32a69a5289 | [
"RSA-MD"
] | null | null | null | thermo_images/teste.py | brochinejr/thermo_images | be9f6d6fa26f58301d102a7cdcc8ca32a69a5289 | [
"RSA-MD"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
import cv2
from skimage.data import astronaut
from skimage.color import rgb2gray
from skimage.filters import sobel
from skimage.segmentation import felzenszwalb, slic, quickshift, watershed
from skimage.segmentation import mark_boundaries
from skimage.util import img_as_float
image_path=r'/home/ccjunio/PycharmProjects/thermo_images/testes/images/dorso_costa00.jpeg'
img=cv2.imread(image_path)
# img = img_as_float(astronaut()[::2, ::2])
segments_fz = felzenszwalb(img, scale=100, sigma=0.5, min_size=50)
segments_slic = slic(img, n_segments=250, compactness=10, sigma=1)
segments_quick = quickshift(img, kernel_size=3, max_dist=6, ratio=0.5)
gradient = sobel(rgb2gray(img))
segments_watershed = watershed(gradient, markers=250, compactness=0.001)
print("Felzenszwalb number of segments: {}".format(len(np.unique(segments_fz))))
print('SLIC number of segments: {}'.format(len(np.unique(segments_slic))))
print('Quickshift number of segments: {}'.format(len(np.unique(segments_quick))))
fig, ax = plt.subplots(2, 2, figsize=(10, 10), sharex=True, sharey=True)
ax[0, 0].imshow(mark_boundaries(img, segments_fz))
ax[0, 0].set_title("Felzenszwalbs's method")
ax[0, 1].imshow(mark_boundaries(img, segments_slic))
ax[0, 1].set_title('SLIC')
ax[1, 0].imshow(mark_boundaries(img, segments_quick))
ax[1, 0].set_title('Quickshift')
ax[1, 1].imshow(mark_boundaries(img, segments_watershed))
# ax[1, 1].imshow(mark_boundaries(img, segments_watershed))
ax[1, 1].set_title('Compact watershed')
for a in ax.ravel():
a.set_axis_off()
plt.tight_layout()
plt.show() | 38.357143 | 90 | 0.770329 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 343 | 0.212911 |
5b3912de771cac5c847fca87045280e13b237dd4 | 2,048 | py | Python | tests/test_params.py | matthewjohnpayne/MPCData | bf5cc4b846437928d3c7e4bfb8d809f8bf9f9dc4 | [
"MIT"
] | null | null | null | tests/test_params.py | matthewjohnpayne/MPCData | bf5cc4b846437928d3c7e4bfb8d809f8bf9f9dc4 | [
"MIT"
] | null | null | null | tests/test_params.py | matthewjohnpayne/MPCData | bf5cc4b846437928d3c7e4bfb8d809f8bf9f9dc4 | [
"MIT"
] | null | null | null | # mpcdata/tests/test_query.py
# import pytest
# Third-party imports
import os
# Import the specific package/module/function we are testing
import mpcdata.params as params
# from .context import mpcdata
def test_required_dictionaries_exist():
    """
    Does params.py contain all of the required dictionaries ?
    """
    # 'urlIDDict' also confirms it was pulled in from params_masterlists
    for dict_name in ('urlIDDict', 'dirDict', 'fileDict', 'downloadSpecDict'):
        assert hasattr(params, dict_name)
def test_required_directory_paths_exist():
    """
    Does dirDict contain the required directory paths ?
    """
    required_keys = ('top', 'code', 'share', 'external', 'internal', 'test')
    assert all(key in params.dirDict for key in required_keys)
def test_expected_directory_paths():
    """
    Does dirDict contain the expected directory paths ?
    """
    # Rebuild the expected layout relative to this test file's location.
    test_dir = os.path.realpath(os.path.dirname(__file__))
    top_dir = os.path.realpath(os.path.dirname(test_dir))
    share_dir = os.path.join(top_dir, 'share')
    expected = {
        'top': top_dir,
        'share': share_dir,
        'external': os.path.join(share_dir, 'data_external'),
        'internal': os.path.join(share_dir, 'data_internal'),
        'test': os.path.join(share_dir, 'data_test'),
        'dev': os.path.join(share_dir, 'data_dev'),
    }
    for key, path in expected.items():
        assert path == params.dirDict[key]
def test_required_filepaths_are_defined():
    """
    Does fileDict contain the required directory paths ?
    """
    # 'test' and 'dev' entries are not required yet
    for key in ('external', 'internal'):
        assert key in params.fileDict
def test_required_specs_exist_for_data_downloads():
    """
    Does downloadSpecDict contain the required paths ?
    """
    assert 'attemptsMax' in params.downloadSpecDict
| 32 | 111 | 0.679199 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 837 | 0.408691 |
5b3953f91cb7e3a0cd0d67201766483abefb5893 | 229 | py | Python | Week 3/Python Track/Permituation.py | Dawit-Getachew/A2SV_Practice | 2fe06d725e0acfe668c6dae98fe3ef6e6e26ef61 | [
"MIT"
] | null | null | null | Week 3/Python Track/Permituation.py | Dawit-Getachew/A2SV_Practice | 2fe06d725e0acfe668c6dae98fe3ef6e6e26ef61 | [
"MIT"
] | null | null | null | Week 3/Python Track/Permituation.py | Dawit-Getachew/A2SV_Practice | 2fe06d725e0acfe668c6dae98fe3ef6e6e26ef61 | [
"MIT"
] | null | null | null | # Enter your code here. Read input from STDIN. Print output to STDOUT
from itertools import permutations
# Read "STRING K" from stdin, sort the letters, and print every
# K-length permutation in lexicographic order, one per line.
tokens = input().split()
letters = sorted(tokens[0])
size = int(tokens[1])
for perm in permutations(letters, size):
    print(''.join(perm))
| 28.625 | 69 | 0.69869 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.310044 |
5b39d240262b13166339eac3bbfa745be468bf90 | 1,685 | py | Python | tests/chainer_tests/functions_tests/math_tests/test_linear_interpolate.py | zaltoprofen/chainer | 3b03f9afc80fd67f65d5e0395ef199e9506b6ee1 | [
"MIT"
] | 3,705 | 2017-06-01T07:36:12.000Z | 2022-03-30T10:46:15.000Z | tests/chainer_tests/functions_tests/math_tests/test_linear_interpolate.py | zaltoprofen/chainer | 3b03f9afc80fd67f65d5e0395ef199e9506b6ee1 | [
"MIT"
] | 5,998 | 2017-06-01T06:40:17.000Z | 2022-03-08T01:42:44.000Z | tests/chainer_tests/functions_tests/math_tests/test_linear_interpolate.py | zaltoprofen/chainer | 3b03f9afc80fd67f65d5e0395ef199e9506b6ee1 | [
"MIT"
] | 1,150 | 2017-06-02T03:39:46.000Z | 2022-03-29T02:29:32.000Z | import numpy
from chainer import functions
from chainer import testing
from chainer import utils
@testing.parameterize(*testing.product({
    'shape': [(3, 4), ()],
    'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
@testing.fix_random()
@testing.inject_backend_tests(
    None,
    # CPU tests
    [
        {},
    ]
    # GPU tests
    + testing.product({
        'use_cuda': [True],
        'cuda_device': [0, 1],
    })
    # ChainerX tests
    + testing.product({
        'use_chainerx': [True],
        'chainerx_device': ['native:0', 'cuda:0', 'cuda:1'],
    })
)
class TestLinearInterpolate(testing.FunctionTestCase):
    """Gradient-checked tests for F.linear_interpolate: p * x + (1 - p) * y."""

    def setUp(self):
        # float16 needs looser tolerances for forward/backward checks.
        if self.dtype == numpy.float16:
            self.check_forward_options.update({
                'atol': 1e-3, 'rtol': 1e-3})
            self.check_backward_options.update({
                'atol': 5e-4, 'rtol': 5e-3})
            self.check_double_backward_options.update({
                'atol': 5e-3, 'rtol': 5e-2})

    def generate_inputs(self):
        # p is the interpolation weight in [0, 1]; x and y are the endpoints.
        p = numpy.random.uniform(0, 1, self.shape).astype(self.dtype)
        x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        y = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        return p, x, y

    def forward(self, inputs, device):
        p, x, y = inputs
        ret = functions.linear_interpolate(p, x, y)
        # Cast to float64 so the comparison precision matches the expectation.
        ret = functions.cast(ret, numpy.float64)
        return ret,

    def forward_expected(self, inputs):
        # Reference NumPy implementation of linear interpolation.
        p, x, y = inputs
        expected = p * x + (1 - p) * y
        expected = utils.force_array(expected, dtype=numpy.float64)
        return expected,
testing.run_module(__name__, __file__)
| 27.622951 | 70 | 0.588131 | 1,055 | 0.626113 | 0 | 0 | 1,543 | 0.915727 | 0 | 0 | 168 | 0.099703 |
5b3c6f097c1609e8407e35a3ba36afcad1ca179f | 1,290 | py | Python | python/exercicios mundo 2/ex36_45.py/ex011.py | LEXW3B/PYTHON | 1ae54ea709c008bd7fab7602e034773610e7985e | [
"MIT"
] | 1 | 2022-01-05T08:51:16.000Z | 2022-01-05T08:51:16.000Z | python/exercicios mundo 2/ex36_45.py/ex011.py | LEXW3B/PYTHON | 1ae54ea709c008bd7fab7602e034773610e7985e | [
"MIT"
] | null | null | null | python/exercicios mundo 2/ex36_45.py/ex011.py | LEXW3B/PYTHON | 1ae54ea709c008bd7fab7602e034773610e7985e | [
"MIT"
] | null | null | null | #45-crie um programa que faça o computador jogar jokenpo com voce.
print('=====JOKENPO=====')
print('')
from random import randint
from time import sleep
itens = ('pedra','papel','tesoura')
computador = randint(0, 2)
print('''FAÇA SUA ESCOLHA
[ 0 ] pedra
[ 1 ] papel
[ 2 ] tesoura
''')
jogador = int(input('Qual a sua jogada ? '))
print('JO')
sleep(1)
print('KEN')
sleep(1)
print('PO')
sleep(1)
print('computador jogou {}.'.format(itens[computador]))
print('jogador jogou {}.'.format(itens[jogador]))
if computador == 0: #computador jogou pedra
if jogador == 0:
print('EMPATE')
elif jogador == 1:
print('JOGADOR VENCE')
elif jogador == 2:
print('COMPUTADOR VENCE')
else:
print('jogada invalida')
elif computador == 1: #computador jogou papel
if jogador == 0:
print('COMPUTADOR VENCE')
elif jogador == 1:
print('EMPATE')
elif jogador == 2:
print('JOGADOR VENCE')
else:
print('jogada invalida')
elif computador == 2: #computador jogou tesoura
if jogador == 0:
print('JOGADOR VENCE')
elif jogador == 1:
print('COMPUTADOR VENCE')
elif jogador == 2:
print('EMPATE')
else:
print('jogada invalida')
#FIM//A\\ | 20.47619 | 66 | 0.589922 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 504 | 0.390093 |
5b3d1a1a7b9ced330a393b5b36de18c6dc02c5f8 | 5,940 | py | Python | pyvcloud/vcd/vm.py | pacogomez/pyvcloud | 731aded20b999d269472caf65df774c284dd49b6 | [
"Apache-2.0"
] | null | null | null | pyvcloud/vcd/vm.py | pacogomez/pyvcloud | 731aded20b999d269472caf65df774c284dd49b6 | [
"Apache-2.0"
] | 1 | 2017-12-28T13:50:54.000Z | 2017-12-28T17:28:15.000Z | pyvcloud/vcd/vm.py | pacogomez/pyvcloud | 731aded20b999d269472caf65df774c284dd49b6 | [
"Apache-2.0"
] | 1 | 2017-12-28T10:22:55.000Z | 2017-12-28T10:22:55.000Z | # VMware vCloud Director Python SDK
# Copyright (c) 2017 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyvcloud.vcd.client import E
from pyvcloud.vcd.client import EntityType
from pyvcloud.vcd.client import NSMAP
from pyvcloud.vcd.client import RelationType
class VM(object):
    """A helper class to work with Virtual Machines in vCloud Director."""

    def __init__(self, client, href=None, resource=None):
        """Constructor.

        :param client: (Client): The client object to communicate with vCD.
        :param href: (str): (optional) href of the VM.
        :param resource: (:class:`lxml.objectify.StringElement`): (optional)
            object describing the VM. When given, its 'href' attribute
            overrides the `href` argument.
        """
        self.client = client
        self.href = href
        self.resource = resource
        if resource is not None:
            self.href = resource.get('href')

    def _get_resource(self):
        """Return the cached VM representation, fetching it if necessary.

        Unlike :meth:`reload`, this does not refresh an already cached
        representation and does not update `self.href`.
        """
        if self.resource is None:
            self.resource = self.client.get_resource(self.href)
        return self.resource

    def reload(self):
        """Updates the xml representation of the VM with the latest from vCD.
        """
        self.resource = self.client.get_resource(self.href)
        if self.resource is not None:
            self.href = self.resource.get('href')

    def modify_cpu(self, virtual_quantity, cores_per_socket=None):
        """Updates the number of CPUs of a VM.

        :param virtual_quantity: (int): The number of virtual CPUs to
            configure on the VM.
        :param cores_per_socket: (int): The number of cores per socket;
            defaults to `virtual_quantity` (i.e. a single socket).
        :return: A :class:`lxml.objectify.StringElement` object describing the
            asynchronous task that updates the VM.
        """
        uri = self.href + '/virtualHardwareSection/cpu'
        if cores_per_socket is None:
            cores_per_socket = virtual_quantity
        item = self.client.get_resource(uri)
        item['{' + NSMAP['rasd'] +
             '}ElementName'] = '%s virtual CPU(s)' % virtual_quantity
        item['{' + NSMAP['rasd'] + '}VirtualQuantity'] = virtual_quantity
        item['{' + NSMAP['vmw'] + '}CoresPerSocket'] = cores_per_socket
        return self.client.put_resource(uri, item, EntityType.RASD_ITEM.value)

    def modify_memory(self, virtual_quantity):
        """Updates the memory of a VM.

        :param virtual_quantity: (int): The number of MB of memory to
            configure on the VM.
        :return: A :class:`lxml.objectify.StringElement` object describing the
            asynchronous task that updates the VM.
        """
        uri = self.href + '/virtualHardwareSection/memory'
        item = self.client.get_resource(uri)
        # Bug fix: the element name previously read '%s virtual CPU(s)'
        # (copy-paste from modify_cpu); it should describe memory.
        item['{' + NSMAP['rasd'] +
             '}ElementName'] = '%s MB of memory' % virtual_quantity
        item['{' + NSMAP['rasd'] + '}VirtualQuantity'] = virtual_quantity
        return self.client.put_resource(uri, item, EntityType.RASD_ITEM.value)

    def _power_request(self, rel):
        """Post a power-state request; shared by power_on/power_off/reset."""
        return self.client.post_linked_resource(
            self._get_resource(), rel, None, None)

    def power_on(self):
        """Powers on the VM.

        :return: A :class:`lxml.objectify.StringElement` object describing the
            asynchronous task that operates on the VM.
        """
        return self._power_request(RelationType.POWER_ON)

    def power_off(self):
        """Powers off the VM.

        :return: A :class:`lxml.objectify.StringElement` object describing the
            asynchronous task that operates on the VM.
        """
        return self._power_request(RelationType.POWER_OFF)

    def power_reset(self):
        """Power-resets the VM.

        :return: A :class:`lxml.objectify.StringElement` object describing the
            asynchronous task that operates on the VM.
        """
        return self._power_request(RelationType.POWER_RESET)

    def undeploy(self, action='default'):
        """Undeploys the VM.

        :param action: (str): The power action to apply while undeploying.
        :return: A :class:`lxml.objectify.StringElement` object describing the
            asynchronous task that operates on the VM.
        """
        params = E.UndeployVAppParams(E.UndeployPowerAction(action))
        return self.client.post_linked_resource(
            self._get_resource(), RelationType.UNDEPLOY,
            EntityType.UNDEPLOY.value, params)

    def get_cpus(self):
        """Returns the number of CPUs.

        :return: A dictionary with:
            num_cpus: (int): number of cpus
            num_cores_per_socket: (int): number of cores per socket
        """
        resource = self._get_resource()
        return {
            'num_cpus':
                int(resource.VmSpecSection.NumCpus.text),
            'num_cores_per_socket':
                int(resource.VmSpecSection.NumCoresPerSocket.text)
        }

    def get_memory(self):
        """Returns the amount of memory in MB.

        :return: (int): Amount of memory in MB.
        """
        resource = self._get_resource()
        return int(resource.VmSpecSection.MemoryResourceMb.Configured.text)
| 37.125 | 79 | 0.639394 | 5,139 | 0.865152 | 0 | 0 | 0 | 0 | 0 | 0 | 2,912 | 0.490236 |
5b3d25843802a6a90ece238f7b25e27cfaa9ff9d | 8,105 | py | Python | backend/api/models.py | ezequielaranda/solservicios | 275405e628b17543a42b28a9bc25bea345669aae | [
"MIT"
] | null | null | null | backend/api/models.py | ezequielaranda/solservicios | 275405e628b17543a42b28a9bc25bea345669aae | [
"MIT"
] | 6 | 2020-05-07T19:32:26.000Z | 2021-06-10T22:56:56.000Z | backend/api/models.py | ezequielaranda/solservicios | 275405e628b17543a42b28a9bc25bea345669aae | [
"MIT"
] | null | null | null | from django.db import models
from rest_framework import serializers
from django.utils import timezone
from django.contrib.auth.models import User
class Empresa(models.Model):
    """Company: basic identification and location data."""
    nombre = models.CharField(max_length=150, null=True)      # company name
    domicilio = models.CharField(max_length=150, null=True)   # street address
    ciudad = models.CharField(max_length=150, null=True)      # city
class Estado(models.Model):
    """Generic status lookup table (unique code plus description),
    referenced by most other models."""
    codigo = models.CharField(max_length=8, unique=True)
    descripcion = models.CharField(max_length=60)

    def __str__(self):
        return self.descripcion
class Cliente(models.Model):
    """Customer, with a nullable current status."""
    nombre_completo = models.CharField(max_length=50)   # full name
    domicilio = models.CharField(max_length=60)         # address
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)

    def __str__(self):
        return self.nombre_completo
class PuntoLimpiezaCliente(models.Model):
    """Cleaning point (delivery site) belonging to a customer; removed
    together with its customer (CASCADE)."""
    nombre_completo = models.CharField(max_length=50)
    domicilio = models.CharField(max_length=60, null=True)
    cliente = models.ForeignKey(Cliente, on_delete=models.CASCADE)
# Definition of the PROVEEDOR (supplier) class.
class Proveedor(models.Model):
    """Supplier, including Argentine tax identification data (CUIT,
    gross-income registration, VAT condition)."""
    nombre_completo = models.CharField(max_length=50)
    razon_social = models.CharField(max_length=60)      # legal business name
    domicilio = models.CharField(max_length=70)
    ingresos_brutos = models.CharField(max_length=10)   # gross-income reg. no.
    fecha_inicio_actividades = models.DateField(auto_now=False)
    # VAT taxpayer categories (AFIP conditions).
    CONDICION_IVA_CHOICES = (
        ('RI', 'Responsable Inscripto'),
        ('MO', 'Monotributista'),
        ('EX', 'Exento'),
        ('NR', 'No Responsable'),
        ('CF', 'Consumidor Final'),
    )
    condicionIVA = models.CharField(max_length=2,choices=CONDICION_IVA_CHOICES)
    cuit = models.CharField(max_length=11)              # tax ID (11 digits)
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)

    def __str__(self):
        return self.nombre_completo
# Definition of the product TYPES.
class TipoProducto(models.Model):
    """Product type lookup (unique code plus description)."""
    descripcion = models.CharField(max_length=50)
    codigo = models.CharField(max_length=8, unique=True)
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)

    def __str__(self):
        return self.descripcion
# Definition of the product FAMILIES.
class FamiliaProducto(models.Model):
    """Product family lookup (unique code plus description)."""
    descripcion = models.CharField(max_length=50)
    codigo = models.CharField(max_length=8, unique=True)
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)

    def __str__(self):
        return self.descripcion
# Definition of the ACCION (action/permission) class.
class Accion(models.Model):
    """Named action with a unique code; granted to users via
    ``Usuario.accionesPermitidas``."""
    nombre_completo = models.CharField(max_length=50)
    codigo_accion = models.CharField(max_length=8, unique=True)
# Definition of the PRODUCTO (product) class.
class Producto(models.Model):
    """Product, linked to its supplier, type and family."""
    nombre_completo = models.CharField(max_length=50)
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)
    proveedor = models.ForeignKey(Proveedor, related_name='productos', on_delete=models.SET_NULL, null=True)
    tipoProducto = models.ForeignKey(TipoProducto, on_delete=models.SET_NULL, null=True)
    familiaProducto = models.ForeignKey(FamiliaProducto, on_delete=models.SET_NULL, null=True)
    # Multiplication factor applied to the product (unit conversion or
    # markup -- presumably pricing related; confirm against callers).
    factor_multiplicacion = models.FloatField(default=1)

    def __str__(self):
        return self.nombre_completo
# Definition of the USUARIO (user) class.
class Usuario(models.Model):
    """Application user profile: role plus the set of permitted actions.
    (Separate from ``django.contrib.auth.models.User``.)"""
    ROL_USUARIO_CHOICES = (
        ('EMP', 'Empleado'),
        ('REP', 'Repartidor'),
        ('SUP', 'Supervisor'),
        ('ADM', 'Administrador'),
    )
    rol_usuario = models.CharField(max_length=3,choices=ROL_USUARIO_CHOICES)
    nombre_completo = models.CharField(max_length=50)
    accionesPermitidas = models.ManyToManyField(Accion)
class EntregaCliente(models.Model):
    """Delivery to a customer cleaning point.

    ``fecha_alta_entrega`` is set automatically on save (auto_now=True);
    the delivery point is PROTECTed against deletion while referenced.
    """
    fecha_entrega = models.DateField()
    fecha_alta_entrega = models.DateField(auto_now=True)
    punto_limpieza_cliente = models.ForeignKey(PuntoLimpiezaCliente, on_delete=models.PROTECT)
    usuario_alta = models.ForeignKey(User, on_delete=models.DO_NOTHING)
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)
class ItemEntregaCliente(models.Model):
    """Line item of a customer delivery.

    ``esEntrega`` distinguishes delivered items (True) from returned/collected
    ones (False) -- presumably; confirm against the views that set it.
    """
    entregaCliente = models.ForeignKey(EntregaCliente, related_name='itemsEntrega', on_delete=models.CASCADE)
    fecha_alta_item_entrega = models.DateField(auto_now=True)
    producto = models.ForeignKey(Producto, related_name='productosEntrega', on_delete=models.PROTECT)
    cantidad = models.IntegerField()
    esEntrega = models.BooleanField()
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)
# Definition of the purchase INVOICES.
class FacturaCompra(models.Model):
    """Purchase invoice from a supplier, with VAT amounts broken down by
    rate (27%, 21%, 10.5%, 0%)."""
    fecha_factura_compra = models.DateField()   # invoice date
    fecha_alta_factura = models.DateField()     # date entered in the system
    proveedor = models.ForeignKey(Proveedor, on_delete=models.PROTECT)
    usuario_alta = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    importe_neto_gravado = models.FloatField()  # net taxable amount
    importe_total = models.FloatField()         # invoice total
    iva27 = models.FloatField()
    iva21 = models.FloatField()
    iva105 = models.FloatField()
    iva0 = models.FloatField()
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)
class ItemsFactura(models.Model):
    """Line item of a purchase invoice (deleted with its invoice)."""
    facturaCompra = models.ForeignKey(FacturaCompra, related_name='itemsFactura', on_delete=models.CASCADE)
    producto = models.ForeignKey(Producto, on_delete=models.PROTECT)
    cantidad = models.IntegerField()
    precio_compra = models.FloatField()            # purchase unit price
    unidad_medida = models.CharField(max_length=10)  # unit of measure
    alicuotaIVA = models.FloatField()              # VAT rate for this line
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)
class OrdenCompra(models.Model):
    """Purchase order sent to a supplier."""
    fecha_orden_compra = models.DateField()   # order date
    fecha_alta_orden = models.DateField()     # date entered in the system
    proveedor = models.ForeignKey(Proveedor, on_delete=models.CASCADE)
    usuario_alta = models.ForeignKey(User, on_delete=models.DO_NOTHING)
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)
class ItemsOrdenCompra(models.Model):
    """Line item of a purchase order (deleted with its order)."""
    ordenCompra = models.ForeignKey(OrdenCompra, related_name='itemsOrdenCompra', on_delete=models.CASCADE)
    producto = models.ForeignKey(Producto, on_delete=models.CASCADE)
    cantidad = models.IntegerField()
    estado = models.ForeignKey(Estado, on_delete=models.SET_NULL, null=True)
# Definition of the product STOCK history.
class StockHistoricoProducto(models.Model):
    """Stock movement record for a product.

    A movement originates either from an invoice line (``itemFactura``) or a
    delivery line (``itemEntrega``); both are one-to-one and nullable.
    """
    producto = models.ForeignKey(Producto, on_delete=models.CASCADE, related_name='stocks')
    #itemFactura = models.ForeignKey(ItemsFactura, unique=True, on_delete=models.CASCADE)
    itemFactura = models.OneToOneField(ItemsFactura, on_delete=models.CASCADE, null=True)
    itemEntrega = models.OneToOneField(ItemEntregaCliente, on_delete=models.CASCADE, null=True)
    fecha_alta = models.DateField()
    cantidad = models.IntegerField()
    # Kanban board station the movement is associated with.
    KANBAN_STATIONS = (
        ('OC_IN', 'Ingreso de Orden de Compra'),
        ('OC_OUT', 'Egreso de Orden de Compra'),
        ('ST_IN', 'Ingreso de Stock'),
        ('ST_OUT', 'Egreso de Stock'),
    )
    estacion_kanban = models.CharField(null=True, max_length=6, choices=KANBAN_STATIONS)
    # NOTE(review): plain integer status, unlike the Estado FK used elsewhere
    # -- semantics not visible here; confirm against callers.
    estado = models.IntegerField(null=True)
    comments = models.CharField(null=True, max_length=100)
# Definition of the product PRICE history.
class PrecioHistoricoProducto(models.Model):
    """Historical price of a product, newest first (see Meta.ordering);
    optionally tied to the invoice line that set it."""
    fecha_inicio = models.DateField()   # date the price takes effect
    importe = models.FloatField()       # price amount
    #isCurrent = models.BooleanField()
    producto = models.ForeignKey(Producto, related_name='preciosProducto', on_delete=models.CASCADE)
    #itemFactura = models.ForeignKey(ItemsFactura, unique=True, on_delete=models.CASCADE)
    itemFactura = models.OneToOneField(ItemsFactura, on_delete=models.CASCADE, null=True)

    class Meta:
        ordering = ['-fecha_inicio']
# Definition of the actions performed by users (audit log).
class AccionesRealizadasUsuario(models.Model):
    """Audit entry: which user performed which action, when, and a free-form
    description. ``fecha_accion`` is set automatically on save."""
    accion = models.ForeignKey(Accion, on_delete=models.DO_NOTHING)
    usuario = models.ForeignKey(User, on_delete=models.DO_NOTHING)
    fecha_accion = models.DateField(auto_now=True)
    descripcion = models.CharField(max_length=150)
| 41.142132 | 109 | 0.744232 | 7,507 | 0.925077 | 0 | 0 | 0 | 0 | 0 | 0 | 1,001 | 0.123352 |
5b3e1ecc5b38c542cefb82dcef9d99e754c07ad0 | 8,113 | py | Python | Leak #5 - Lost In Translation/windows/Resources/Dsz/PyScripts/Lib/dsz/mca/survey/cmd/passworddump/errors.py | bidhata/EquationGroupLeaks | 1ff4bc115cb2bd5bf2ed6bf769af44392926830c | [
"Unlicense"
] | 9 | 2019-11-22T04:58:40.000Z | 2022-02-26T16:47:28.000Z | Leak #5 - Lost In Translation/windows/Resources/Dsz/PyScripts/Lib/dsz/mca/survey/cmd/passworddump/errors.py | bidhata/EquationGroupLeaks | 1ff4bc115cb2bd5bf2ed6bf769af44392926830c | [
"Unlicense"
] | null | null | null | Leak #5 - Lost In Translation/windows/Resources/Dsz/PyScripts/Lib/dsz/mca/survey/cmd/passworddump/errors.py | bidhata/EquationGroupLeaks | 1ff4bc115cb2bd5bf2ed6bf769af44392926830c | [
"Unlicense"
] | 8 | 2017-09-27T10:31:18.000Z | 2022-01-08T10:30:46.000Z | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: errors.py
import mcl.status
# Status codes for the passworddump command. ERR_SUCCESS aliases the
# framework-wide success value; every other code is an offset from the
# framework error base (mcl.status.framework.ERR_START). Human-readable
# descriptions live in the `errorStrings` mapping below.
ERR_SUCCESS = mcl.status.MCL_SUCCESS
ERR_INVALID_PARAM = mcl.status.framework.ERR_START
ERR_NOT_IMPLEMENTED = mcl.status.framework.ERR_START + 1
ERR_MARSHAL_FAILED = mcl.status.framework.ERR_START + 2
ERR_GET_API_FAILED = mcl.status.framework.ERR_START + 3
ERR_OPEN_DATA_PIPE_FAILED = mcl.status.framework.ERR_START + 4
ERR_INJECT_SETUP_FAILED = mcl.status.framework.ERR_START + 5
ERR_INJECT_FAILED = mcl.status.framework.ERR_START + 6
ERR_FAILED_TO_FIND_PROCESS = mcl.status.framework.ERR_START + 7
ERR_OPEN_PROCESS_FAILED = mcl.status.framework.ERR_START + 8
ERR_CONNECT_PIPE_FAILED = mcl.status.framework.ERR_START + 9
ERR_READ_PIPE_FAILED = mcl.status.framework.ERR_START + 10
ERR_EXCEPTION = mcl.status.framework.ERR_START + 11
ERR_ALLOC_FAILED = mcl.status.framework.ERR_START + 12
ERR_INJECTION_FINISHED = mcl.status.framework.ERR_START + 13
ERR_GET_EXIT_CODE_FAILED = mcl.status.framework.ERR_START + 14
ERR_MISSING_BUFFER_DATA = mcl.status.framework.ERR_START + 15
ERR_INJECT_WRITE_FAILED = mcl.status.framework.ERR_START + 16
ERR_INJECT_THREAD_ENDED = mcl.status.framework.ERR_START + 17
# Codes from +18 onward are reported by the injected thread itself.
ERR_INJECT_OPEN_PIPE_FAILED = mcl.status.framework.ERR_START + 18
ERR_INJECT_LOAD_LIBRARY_FAILED = mcl.status.framework.ERR_START + 19
ERR_INJECT_LSA_OPEN_FAILED = mcl.status.framework.ERR_START + 20
ERR_INJECT_LSA_QUERY_FAILED = mcl.status.framework.ERR_START + 21
ERR_INJECT_SAMI_CONNECT_FAILED = mcl.status.framework.ERR_START + 22
ERR_INJECT_SAMR_OPEN_DOMAIN_FAILED = mcl.status.framework.ERR_START + 23
ERR_INJECT_SAMR_ENUM_USERS_FAILED = mcl.status.framework.ERR_START + 24
ERR_INJECT_SAMR_OPEN_USER_FAILED = mcl.status.framework.ERR_START + 25
ERR_INJECT_SAMR_QUERY_USER_FAILED = mcl.status.framework.ERR_START + 26
ERR_INJECT_LSAI_OPEN_POLICY_FAILED = mcl.status.framework.ERR_START + 27
ERR_INJECT_REG_OPEN_FAILED = mcl.status.framework.ERR_START + 28
ERR_INJECT_LSAR_OPEN_SECRET_FAILED = mcl.status.framework.ERR_START + 29
ERR_INJECT_LSAR_QUERY_SECRET_FAILED = mcl.status.framework.ERR_START + 30
ERR_INJECT_POINTER_NULL = mcl.status.framework.ERR_START + 31
ERR_INJECT_EXCEPTION = mcl.status.framework.ERR_START + 32
ERR_REQUIRED_LIBRARY_NOT_LOADED = mcl.status.framework.ERR_START + 33
ERR_INJECT_DIGEST_ENUM_FAILED = mcl.status.framework.ERR_START + 34
ERR_INJECT_DIGEST_GET_LOGON_DATA_FAILED = mcl.status.framework.ERR_START + 35
ERR_INJECT_DIGEST_LOGON_TO_ID_FAILED = mcl.status.framework.ERR_START + 36
ERR_INJECT_DIGEST_LOG_SESS_PASSWD_GET_FAILED = mcl.status.framework.ERR_START + 37
# Pattern-match failures while locating functions in the target process.
ERR_INJECT_FIND_FUNCTION_1 = mcl.status.framework.ERR_START + 38
ERR_INJECT_FIND_FUNCTION_2 = mcl.status.framework.ERR_START + 39
ERR_INJECT_FIND_FUNCTION_3 = mcl.status.framework.ERR_START + 40
ERR_INJECT_FIND_FUNCTION_4 = mcl.status.framework.ERR_START + 41
ERR_INJECT_FIND_FUNCTION_5 = mcl.status.framework.ERR_START + 42
# GetProcAddress failures. Note the numbering gap: +43..+49 are unused.
ERR_INJECT_GPA_FAILED_1 = mcl.status.framework.ERR_START + 50
ERR_INJECT_GPA_FAILED_2 = mcl.status.framework.ERR_START + 51
ERR_INJECT_GPA_FAILED_3 = mcl.status.framework.ERR_START + 52
ERR_INJECT_GPA_FAILED_4 = mcl.status.framework.ERR_START + 53
ERR_INJECT_GPA_FAILED_5 = mcl.status.framework.ERR_START + 54
ERR_INJECT_GPA_FAILED_6 = mcl.status.framework.ERR_START + 55
ERR_INJECT_GPA_FAILED_7 = mcl.status.framework.ERR_START + 56
ERR_INJECT_GPA_FAILED_8 = mcl.status.framework.ERR_START + 57
ERR_INJECT_GPA_FAILED_9 = mcl.status.framework.ERR_START + 58
ERR_INJECT_GPA_FAILED_10 = mcl.status.framework.ERR_START + 59
ERR_UNSUPPORTED_PLATFORM = mcl.status.framework.ERR_START + 60
ERR_INJECT_FUNCTION_INVALID = mcl.status.framework.ERR_START + 61
errorStrings = {ERR_INVALID_PARAM: 'Invalid parameter(s)',
ERR_NOT_IMPLEMENTED: 'Not implemented on this platform',
ERR_MARSHAL_FAILED: 'Marshaling data failed',
ERR_GET_API_FAILED: 'Failed to get required API',
ERR_OPEN_DATA_PIPE_FAILED: 'Open of data pipe for transfer failed',
ERR_INJECT_SETUP_FAILED: 'Setup of necessary injection functions failed',
ERR_INJECT_FAILED: 'Injection into process failed',
ERR_FAILED_TO_FIND_PROCESS: 'Failed to find required process',
ERR_OPEN_PROCESS_FAILED: 'Unable to open process for injection',
ERR_CONNECT_PIPE_FAILED: 'Connect to data pipe failed',
ERR_READ_PIPE_FAILED: 'Read from data pipe failed',
ERR_EXCEPTION: 'Exception encountered',
ERR_ALLOC_FAILED: 'Memory allocation failed',
ERR_INJECTION_FINISHED: 'Injection finished',
ERR_GET_EXIT_CODE_FAILED: 'Get of injected thread exit code failed',
ERR_MISSING_BUFFER_DATA: 'Data returned from injected thread is invalid',
ERR_INJECT_WRITE_FAILED: 'Write of data to pipe failed',
ERR_INJECT_THREAD_ENDED: 'Injection thread has closed abnormally',
ERR_INJECT_OPEN_PIPE_FAILED: 'InjectThread: Open of data pipe failed',
ERR_INJECT_LOAD_LIBRARY_FAILED: 'InjectThread: Failed to load required library',
ERR_INJECT_LSA_OPEN_FAILED: 'InjectThread: LsaOpenPolicy call failed',
ERR_INJECT_LSA_QUERY_FAILED: 'InjectThread: LsaQueryInformationPolicy call failed',
ERR_INJECT_SAMI_CONNECT_FAILED: 'InjectThread: SamIConnect call failed',
ERR_INJECT_SAMR_OPEN_DOMAIN_FAILED: 'InjectThread: SamrOpenDomain call failed',
ERR_INJECT_SAMR_ENUM_USERS_FAILED: 'InjectThread: SamrEnumerateUsersInDomain call failed',
ERR_INJECT_SAMR_OPEN_USER_FAILED: 'InjectThread: SamrOpenUser call failed',
ERR_INJECT_SAMR_QUERY_USER_FAILED: 'InjectThread: SamrQueryInformationUser call failed',
ERR_INJECT_LSAI_OPEN_POLICY_FAILED: 'InjectThread: LsaIOpenPolicyTrusted call failed',
ERR_INJECT_REG_OPEN_FAILED: 'InjectThread: Failed to open registry key',
ERR_INJECT_LSAR_OPEN_SECRET_FAILED: 'InjectThread: LsarOpenSecret call failed',
ERR_INJECT_LSAR_QUERY_SECRET_FAILED: 'InjectThread: LsarQuerySecret call failed',
ERR_INJECT_POINTER_NULL: 'InjectThread: Internal pointer is NULL',
ERR_INJECT_EXCEPTION: 'InjectThread: Exception encountered',
ERR_REQUIRED_LIBRARY_NOT_LOADED: 'Library required for operation not loaded',
ERR_INJECT_DIGEST_ENUM_FAILED: 'InjectThread: LsaEnumerateLogonSessions failed',
ERR_INJECT_DIGEST_GET_LOGON_DATA_FAILED: 'InjectThread: LsaGetLogonSessionData failed',
ERR_INJECT_DIGEST_LOGON_TO_ID_FAILED: 'InjectThread: LogSessHandlerLogonIdToPtr failed',
ERR_INJECT_DIGEST_LOG_SESS_PASSWD_GET_FAILED: 'InjectThread: LogSessHandlerPasswdGet failed',
ERR_INJECT_FIND_FUNCTION_1: 'InjectThread: Pattern match for function LogSessHandlerLogonIdToPtr failed',
ERR_INJECT_FIND_FUNCTION_2: 'InjectThread: Pattern match for function LogSessHandlerPasswdGet failed',
ERR_INJECT_FIND_FUNCTION_3: 'InjectThread: Pattern match for function LogSessHandlerRelease failed',
ERR_INJECT_FIND_FUNCTION_4: 'InjectThread: Pattern match for function StringFree failed',
ERR_INJECT_FIND_FUNCTION_5: 'InjectThread: Pattern match for function LsaEncryptMemory failed',
ERR_INJECT_GPA_FAILED_1: 'InjectThread: Failed to get required procedure address (1)',
ERR_INJECT_GPA_FAILED_2: 'InjectThread: Failed to get required procedure address (2)',
ERR_INJECT_GPA_FAILED_3: 'InjectThread: Failed to get required procedure address (3)',
ERR_INJECT_GPA_FAILED_4: 'InjectThread: Failed to get required procedure address (4)',
ERR_INJECT_GPA_FAILED_5: 'InjectThread: Failed to get required procedure address (5)',
ERR_INJECT_GPA_FAILED_6: 'InjectThread: Failed to get required procedure address (6)',
ERR_INJECT_GPA_FAILED_7: 'InjectThread: Failed to get required procedure address (7)',
ERR_INJECT_GPA_FAILED_8: 'InjectThread: Failed to get required procedure address (8)',
ERR_INJECT_GPA_FAILED_9: 'InjectThread: Failed to get required procedure address (9)',
ERR_INJECT_GPA_FAILED_10: 'InjectThread: Failed to get required procedure address (10)',
ERR_UNSUPPORTED_PLATFORM: 'The desired operation is not supported on this platform.',
ERR_INJECT_FUNCTION_INVALID: 'The function to inject cannot be located'
} | 68.754237 | 109 | 0.824972 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,705 | 0.333416 |
5b3efa495764e79db80a8d873dca101b98d76f42 | 682 | py | Python | meep/__init__.py | mtander/meep | 5b8085014d43f9201b2283bc7d9085f1ace20406 | [
"MIT"
] | null | null | null | meep/__init__.py | mtander/meep | 5b8085014d43f9201b2283bc7d9085f1ace20406 | [
"MIT"
] | null | null | null | meep/__init__.py | mtander/meep | 5b8085014d43f9201b2283bc7d9085f1ace20406 | [
"MIT"
] | null | null | null | from flask import Flask
from config import DefaultConfig
# Factory method for creating app objects.
def create_app(config=None):
    """Create and configure a Flask application instance.

    :param config: configuration object passed to ``app.config.from_object``.
        Defaults to a fresh ``DefaultConfig()`` created per call. (The old
        signature ``config=DefaultConfig()`` instantiated the config once at
        import time and shared that single instance between every call.)
    :return: the configured :class:`flask.Flask` application.
    """
    if config is None:
        config = DefaultConfig()
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_object(config)

    # Initialize database and migrations. Imports are local so the models
    # module is only loaded once an app is actually being built.
    from meep.models import db, migrate
    from meep.models import Address, AreaOfEffect
    from meep.models import Coordinate, FuelType
    from meep.models import Owner, Line, Project
    from meep.models import Radius, Site
    db.init_app(app)
    migrate.init_app(app, db)

    # Register blueprints.
    from meep.resources import project
    app.register_blueprint(project.project)

    return app
| 27.28 | 56 | 0.752199 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 98 | 0.143695 |
5b3f258a42659e83c84a193664cad0abb36435d6 | 22,508 | py | Python | inversehaar.py | matthewearl/inversehaar | 876cfb055e656e1c75ef25dbfbecdf3ce9f8ad3b | [
"MIT"
] | 20 | 2016-01-18T12:26:06.000Z | 2019-04-15T11:13:49.000Z | inversehaar.py | matthewearl/inversehaar | 876cfb055e656e1c75ef25dbfbecdf3ce9f8ad3b | [
"MIT"
] | null | null | null | inversehaar.py | matthewearl/inversehaar | 876cfb055e656e1c75ef25dbfbecdf3ce9f8ad3b | [
"MIT"
] | 4 | 2017-05-31T02:40:41.000Z | 2020-04-22T10:38:03.000Z | #!/usr/bin/env python
# Copyright (c) 2015 Matthew Earl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Invert OpenCV haar cascades.
See http://matthewearl.github.io/2016/01/14/inverse-haar/ for an overview of
how the code works.
"""
__all__ = (
'Cascade',
'inverse_haar',
)
import collections
import sys
import xml.etree.ElementTree
import cv2
import numpy
from docplex.mp.context import DOcloudContext
from docplex.mp.environment import Environment
from docplex.mp.model import Model
# Constants
# URL to connect to DoCloud with. This may need changing to your particular
# URL.
DOCLOUD_URL = 'https://api-oaas.docloud.ibmcloud.com/job_manager/rest/v1/'
# OpenCV preprocesses analysed regions by dividing by the standard deviation.
# Unfortunately this step cannot be modelled with LP constraints, so we just
# allow a reasonably high pixel value. This value should be at least 2, seeing
# as the maximum standard deviation of a set of values between 0 and 1 is 0.5.
MAX_PIXEL_VALUE = 2.0
# Grid classes
class Grid(object):
    """
    A division of an image area into cells.

    For example, `SquareGrid` divides the image into pixels.

    Cell values are represented with "cell vectors", so for example,
    `Grid.render_cell_vec` will take a cell vector and produce an image.

    This is an abstract interface; concrete subclasses must implement all
    three members below.
    """
    @property
    def num_cells(self):
        """The number of cells in this grid"""
        raise NotImplementedError

    def rect_to_cell_vec(self, r):
        """
        Return a boolean cell vector corresponding with the input rectangle.

        Elements of the returned vector are True if and only if the
        corresponding cells fall within the input rectangle
        """
        raise NotImplementedError

    def render_cell_vec(self, vec, im_width, im_height):
        """Render an image, using a cell vector and image dimensions."""
        raise NotImplementedError
class SquareGrid(Grid):
    """
    A grid where cells correspond with pixels.

    This grid type is used for cascades which do not contain diagonal
    features.
    """
    def __init__(self, width, height):
        """
        :param width: Grid width, in cells.
        :param height: Grid height, in cells.
        """
        self._width = width
        self._height = height
        # Row-major order (y outer, x inner) -- must match the flatten order
        # used in rect_to_cell_vec and the reshape in render_cell_vec.
        self.cell_names = ["pixel_{}_{}".format(x, y)
                           for y in range(height) for x in range(width)]

    @property
    def num_cells(self):
        """The number of cells in this grid."""
        return self._width * self._height

    def rect_to_cell_vec(self, r):
        """Return a boolean cell vector covering rectangle `r` (untilted)."""
        assert not r.tilted
        # Bug fix: the mask must be laid out (rows, cols) == (height, width)
        # so that the [y, x] indexing below and the flatten order agree with
        # `cell_names`. The previous (width, height) shape produced wrong
        # vectors for non-square grids. Also use the builtin `bool`: the
        # deprecated `numpy.bool` alias was removed in NumPy 1.24.
        out = numpy.zeros((self._height, self._width), dtype=bool)
        out[r.y:r.y + r.h, r.x:r.x + r.w] = True
        return out.flatten()

    def render_cell_vec(self, vec, im_width, im_height):
        """Render a cell vector as an image of the requested size."""
        im = vec.reshape(self._height, self._width)
        return cv2.resize(im, (im_width, im_height),
                          interpolation=cv2.INTER_NEAREST)
class TiltedGrid(Grid):
    """
    A square grid, but each square consists of 4 cells.

    The squares are cut diagonally, resulting in a north, east, south and west
    triangle for each cell.

    This grid type is used for cascades which contain diagonal features: The
    idea is that the area which a diagonal feature should be integrated can be
    represented exactly by this structure.

    Unfortunately, this is not quite accurate: OpenCV's trainer and detector
    always resizes its images so that pixels correspond with one grid cell. As
    such cascades which contain diagonal features will not be accurately
    inverted by this script, however, they will have more detail as a result of
    the grid square subdivision.
    """
    def __init__(self, width, height):
        """
        :param width: Grid width, in squares (each square holds 4 cells).
        :param height: Grid height, in squares.
        """
        self._width = width
        self._height = height
        # Index layout: 4 consecutive slots per square, ordered N, E, S, W
        # (d = 0..3), squares in row-major order.
        self._cell_indices = {(d, x, y): 4 * ((width * y) + x) + d
                              for y in range(height)
                              for x in range(width)
                              for d in range(4)}
        self.cell_names = ['cell_{}_{}_{}'.format(x, y, "NESW"[d])
                           for y in range(height)
                           for x in range(width)
                           for d in range(4)]
        # Representative interior point of each triangular cell, used by
        # rect_to_cell_vec for point-in-rectangle tests.
        self._cell_points = numpy.zeros((width * height * 4, 2))
        for y in range(height):
            for x in range(width):
                self._cell_points[self._cell_indices[0, x, y], :] = \
                    numpy.array([x + 0.5, y + 0.25])
                self._cell_points[self._cell_indices[1, x, y], :] = \
                    numpy.array([x + 0.75, y + 0.5])
                self._cell_points[self._cell_indices[2, x, y], :] = \
                    numpy.array([x + 0.5, y + 0.75])
                self._cell_points[self._cell_indices[3, x, y], :] = \
                    numpy.array([x + 0.25, y + 0.5])

    @property
    def num_cells(self):
        """The number of cells in this grid (4 per square)."""
        return self._width * self._height * 4

    def _rect_to_bounds(self, r):
        # Express rectangle `r` as four half-plane constraints of the form
        # dirs . p >= limits; a point p lies inside `r` iff all four hold.
        if not r.tilted:
            # Axis-aligned: y >= r.y, x <= r.x + r.w, y <= r.y + r.h, x >= r.x.
            dirs = numpy.matrix([[0, 1], [-1, 0], [0, -1], [1, 0]])
            limits = numpy.matrix([[r.y, -(r.x + r.w), -(r.y + r.h), r.x]]).T
        else:
            # Tilted rectangles are rotated 45 degrees, so the half-plane
            # normals are the diagonal directions.
            dirs = numpy.matrix([[-1, 1], [-1, -1], [1, -1], [1, 1]])
            limits = numpy.matrix([[r.y - r.x,
                                    2 + -r.x -r.y - 2 * r.w,
                                    r.x - r.y - 2 * r.h,
                                    -2 + r.x + r.y]]).T
        return dirs, limits

    def rect_to_cell_vec(self, r):
        """Return a boolean cell vector covering rectangle `r` (may be
        tilted): a cell is included iff its representative point satisfies
        all four half-plane constraints."""
        dirs, limits = self._rect_to_bounds(r)
        out = numpy.all(numpy.array(dirs * numpy.matrix(self._cell_points).T)
                            >= limits,
                        axis=0)
        return numpy.array(out)[0]

    def render_cell_vec(self, vec, im_width, im_height):
        """Render a cell vector by filling one triangle per cell."""
        out_im = numpy.zeros((im_height, im_width), dtype=vec.dtype)
        # Triangles for the N, E, S, W cells in unit-square coordinates.
        tris = numpy.array([[[0, 0], [1, 0], [0.5, 0.5]],
                            [[1, 0], [1, 1], [0.5, 0.5]],
                            [[1, 1], [0, 1], [0.5, 0.5]],
                            [[0, 1], [0, 0], [0.5, 0.5]]])
        scale_factor = numpy.array([im_width / self._width,
                                    im_height / self._height])
        for y in reversed(range(self._height)):
            for x in range(self._width):
                for d in (2, 3, 1, 0):
                    points = (tris[d] + numpy.array([x, y])) * scale_factor
                    cv2.fillConvexPoly(
                        img=out_im,
                        points=points.astype(numpy.int32),
                        color=vec[self._cell_indices[d, x, y]])
        return out_im
# Cascade definition
class Stage(collections.namedtuple('_StageBase',
                                   ['threshold', 'weak_classifiers'])):
    """
    A stage in an OpenCV cascade (immutable record).

    .. attribute:: weak_classifiers

        A list of weak classifiers in this stage.

    .. attribute:: threshold

        The value that the weak classifiers must exceed for this stage to pass.
    """
class WeakClassifier(collections.namedtuple('_WeakClassifierBase',
        ['feature_idx', 'threshold', 'fail_val', 'pass_val'])):
    """
    A weak classifier in an OpenCV cascade (immutable record).

    .. attribute:: feature_idx

        Index (into the cascade's feature list) of the feature associated
        with this classifier.

    .. attribute:: threshold

        The value that this feature dotted with the input image must exceed for
        the feature to have passed.

    .. attribute:: fail_val

        The value contributed to the stage threshold if this classifier fails.

    .. attribute:: pass_val

        The value contributed to the stage threshold if this classifier passes.
    """
class Rect(collections.namedtuple('_RectBase',
                                  ['x', 'y', 'w', 'h', 'tilted', 'weight'])):
    """
    A rectangle in an OpenCV cascade (immutable record).

    Two or more of these make up a feature.

    .. attribute:: x, y

        Coordinates of the rectangle (top-left corner, in grid cells).

    .. attribute:: w, h

        Width and height of the rectangle, respectively.

    .. attribute:: tilted

        If true, the rectangle is to be considered rotated 45 degrees clockwise
        about its top-left corner. (+X is right, +Y is down.)

    .. attribute:: weight

        The value this rectangle contributes to the feature.
    """
class Cascade(collections.namedtuple('_CascadeBase',
        ['width', 'height', 'stages', 'features', 'tilted', 'grid'])):
    """
    Pythonic interface to an OpenCV cascade file.
    .. attribute:: width
        Width of the cascade grid.
    .. attribute:: height
        Height of the cascade grid.
    .. attribute:: stages
        List of :class:`.Stage` objects.
    .. attribute:: features
        List of features. Each feature is in turn a list of :class:`.Rect`s.
    .. attribute:: tilted
        True if any of the features are tilted.
    .. attribute:: grid
        A :class:`.Grid` object suitable for use with the cascade.
    """
    @staticmethod
    def _split_text_content(n):
        # Split an XML element's whitespace-separated text into tokens.
        return n.text.strip().split(' ')
    @classmethod
    def load(cls, fname):
        """
        Parse an OpenCV haar cascade XML file.
        """
        root = xml.etree.ElementTree.parse(fname)
        width = int(root.find('./cascade/width').text.strip())
        height = int(root.find('./cascade/height').text.strip())
        # Parse the stages and the weak classifiers they contain.
        stages = []
        for stage_node in root.findall('./cascade/stages/_'):
            stage_threshold = float(
                stage_node.find('./stageThreshold').text.strip())
            weak_classifiers = []
            for classifier_node in stage_node.findall('weakClassifiers/_'):
                sp = cls._split_text_content(
                    classifier_node.find('./internalNodes'))
                # Only stump classifiers (internal nodes "0 -1 <idx> <thr>")
                # are supported; anything else is a multi-node tree.
                if sp[0] != "0" or sp[1] != "-1":
                    raise Exception("Only simple cascade files are supported")
                feature_idx = int(sp[2])
                threshold = float(sp[3])
                sp = cls._split_text_content(
                    classifier_node.find('./leafValues'))
                fail_val = float(sp[0])
                pass_val = float(sp[1])
                weak_classifiers.append(
                    WeakClassifier(feature_idx, threshold, fail_val, pass_val))
            stages.append(Stage(stage_threshold, weak_classifiers))
        # Parse the features; each is a list of weighted rectangles.
        features = []
        for feature_node in root.findall('./cascade/features/_'):
            feature = []
            tilted_node = feature_node.find('./tilted')
            if tilted_node is not None:
                tilted = bool(int(tilted_node.text))
            else:
                tilted = False
            for rect_node in feature_node.findall('./rects/_'):
                # Rect text format: "<x> <y> <w> <h> <weight>".
                sp = cls._split_text_content(rect_node)
                x, y, w, h = (int(x) for x in sp[:4])
                weight = float(sp[4])
                feature.append(Rect(x, y, w, h, tilted, weight))
            features.append(feature)
        # A tilted cascade needs the tilted grid implementation.
        tilted = any(r.tilted for f in features for r in f)
        if tilted:
            grid = TiltedGrid(width, height)
        else:
            grid = SquareGrid(width, height)
        # NOTE(review): this copy appears redundant — `stages` is already a
        # fresh local list.
        stages = stages[:]
        return cls(width, height, stages, features, tilted, grid)
    def detect(self, im, epsilon=0.00001, scale_by_std_dev=False):
        """
        Apply the cascade forwards on a potential face image.
        The algorithm is relatively slow compared to the integral image
        implementation, but is relatively terse and consequently useful for
        debugging.
        :param im:
            Image to apply the detector to.
        :param epsilon:
            Maximum rounding error to account for. This biases the classifier
            and stage thresholds towards passing. As a result, passing too
            large a value may result in false positive detections.
        :param scale_by_std_dev:
            If true, divide the input image by its standard deviation before
            processing. This simulates OpenCV's algorithm, however the reverse
            haar mapping implemented by this script does not account for the
            standard deviation divide, so to get the forward version of
            `inverse_haar`, pass False.
        :return:
            1 when the image passes every stage, otherwise the negated index
            of the first stage that failed.
        """
        im = im.astype(numpy.float64)
        # Work at the cascade's native grid resolution.
        im = cv2.resize(im, (self.width, self.height),
                        interpolation=cv2.INTER_AREA)
        scale_factor = numpy.std(im) if scale_by_std_dev else 256.
        im /= scale_factor * (im.shape[1] * im.shape[0])
        for stage_idx, stage in enumerate(self.stages):
            total = 0
            for classifier in stage.weak_classifiers:
                # Render the weighted feature rectangles to an image-sized
                # array so the feature value is a simple elementwise product.
                feature_array = self.grid.render_cell_vec(
                    sum(self.grid.rect_to_cell_vec(r) * r.weight
                        for r in self.features[classifier.feature_idx]),
                    im.shape[1], im.shape[0])
                # Bias the threshold by epsilon towards passing.
                if classifier.pass_val > classifier.fail_val:
                    thr = classifier.threshold - epsilon
                else:
                    thr = classifier.threshold + epsilon
                if numpy.sum(feature_array * im) >= thr:
                    total += classifier.pass_val
                else:
                    total += classifier.fail_val
            if total < stage.threshold - epsilon:
                return -stage_idx
        return 1
class CascadeModel(Model):
    """
    Model of the variables and constraints associated with a Haar cascade.
    This is in fact a wrapper around a docplex model.
    .. attribute:: cell_vars
        List of variables corresponding with the cells in the cascade's grid.
    .. attribute:: feature_vars
        Dict of feature indices to binary variables. Each variable represents
        whether the corresponding feature is present.
    .. attribute:: cascade
        The underlying :class:`.Cascade`.
    """
    def __init__(self, cascade, docloud_context):
        """Make a model from a :class:`.Cascade`."""
        super(CascadeModel, self).__init__("Inverse haar cascade",
                                           docloud_context=docloud_context)
        # One continuous variable per grid cell, bounded to valid pixel values.
        cell_vars = [self.continuous_var(
                                     name=cascade.grid.cell_names[i],
                                     lb=0., ub=MAX_PIXEL_VALUE)
                     for i in range(cascade.grid.num_cells)]
        feature_vars = {idx: self.binary_var(name="feature_{}".format(idx))
                        for idx in range(len(cascade.features))}
        for stage in cascade.stages:
            # Add constraints for the feature vars.
            #
            # If the classifier's pass value is greater than its fail value,
            # then add a constraint equivalent to the following:
            #
            #   feature var set => corresponding feature is present in image
            #
            # Conversely, if the classifier's pass value is less than its fail
            # value, add a constraint equivalent to:
            #
            #   corresponding feature is present in image => feature var set
            for classifier in stage.weak_classifiers:
                feature_vec = numpy.sum(
                           cascade.grid.rect_to_cell_vec(r) * r.weight
                           for r in cascade.features[classifier.feature_idx])
                feature_vec /= (cascade.width * cascade.height)
                thr = classifier.threshold
                feature_var = feature_vars[classifier.feature_idx]
                # Linear expression for the feature value over non-zero cells.
                feature_val = sum(cell_vars[i] * feature_vec[i]
                                  for i in numpy.argwhere(
                                                 feature_vec != 0.).flatten())
                if classifier.pass_val >= classifier.fail_val:
                    # big-M chosen so the constraint is vacuous when the
                    # feature var is 0.
                    big_num = 0.1 + thr - numpy.sum(numpy.min(
                                 [feature_vec, numpy.zeros(feature_vec.shape)],
                                 axis=0))
                    self.add_constraint(feature_val - feature_var * big_num >=
                                        thr - big_num)
                else:
                    big_num = 0.1 + numpy.sum(numpy.max(
                                 [feature_vec, numpy.zeros(feature_vec.shape)],
                                 axis=0)) - thr
                    self.add_constraint(feature_val - feature_var * big_num <=
                                        thr)
            # Enforce that the sum of features present in this stage exceeds
            # the stage threshold.
            fail_val_total = sum(c.fail_val for c in stage.weak_classifiers)
            adjusted_stage_threshold = stage.threshold
            self.add_constraint(sum((c.pass_val - c.fail_val) *
                                    feature_vars[c.feature_idx]
                                    for c in stage.weak_classifiers) >=
                                adjusted_stage_threshold - fail_val_total)
        self.cascade = cascade
        self.cell_vars = cell_vars
        self.feature_vars = feature_vars
    def set_best_objective(self, minimize=False):
        """
        Amend the model with an objective.
        The objective used is to maximise the score from each stage of the
        cascade (or minimise it when `minimize` is true).
        """
        self.set_objective("min" if minimize else "max",
                           sum((c.pass_val - c.fail_val) *
                               self.feature_vars[c.feature_idx]
                               for s in self.cascade.stages
                               for c in s.weak_classifiers))
def inverse_haar(cascade, min_optimize=False, max_optimize=False,
                 time_limit=None, docloud_context=None, lp_path=None):
    """
    Invert a haar cascade, producing an image that passes every stage.
    :param cascade:
        A :class:`.Cascade` to invert.
    :param min_optimize:
        Attempt to find the solution which exceeds the stage constraints as
        little as possible.
    :param max_optimize:
        Attempt to find the solution which exceeds the stage constraints as
        much as possible.
    :param time_limit:
        Maximum time to allow the solver to work, in seconds.
    :param docloud_context:
        :class:`docplex.mp.context.DOcloudContext` to use for solving.
    :param lp_path:
        File to write the LP constraints to. Useful for debugging. (Optional).
    :return:
        The solution rendered as a uint8 image, 10x the cascade grid size.
    :raises ValueError: if both optimization flags are passed.
    :raises Exception: if the solver finds no feasible solution.
    """
    if min_optimize and max_optimize:
        raise ValueError("Cannot pass both min_optimize and max_optimize")
    cascade_model = CascadeModel(cascade, docloud_context)
    if min_optimize or max_optimize:
        cascade_model.set_best_objective(minimize=min_optimize)
    if time_limit is not None:
        cascade_model.set_time_limit(time_limit)
    cascade_model.print_information()
    if lp_path:
        cascade_model.export_as_lp(path=lp_path)
    if not cascade_model.solve():
        raise Exception("Failed to find solution")
    # Normalise cell values to [0, 1] before rendering.
    sol_vec = numpy.array([v.solution_value / MAX_PIXEL_VALUE
                           for v in cascade_model.cell_vars])
    im = cascade_model.cascade.grid.render_cell_vec(sol_vec,
                                                    10 * cascade.width,
                                                    10 * cascade.height)
    im = (im * 255.).astype(numpy.uint8)
    return im
if __name__ == "__main__":
    # Command-line entry point: load a cascade, invert it, write the image.
    import argparse
    parser = argparse.ArgumentParser(
        description='Inverse haar feature object detection')
    parser.add_argument('-c', '--cascade', type=str, required=True,
                        help='OpenCV cascade file to be reversed.')
    parser.add_argument('-o', '--output', type=str, required=True,
                        help='Output image name')
    parser.add_argument('-t', '--time-limit', type=float, default=None,
                        help='Maximum time to allow the solver to work, in '
                             'seconds.')
    parser.add_argument('-O', '--optimize', nargs='?', type=str, const='max',
                        help='Try and find the "best" solution, rather than '
                             'just a feasible solution. Pass "min" to find '
                             'the least best solution.')
    parser.add_argument('-C', '--check', action='store_true',
                        help='Check the result against the (forward) cascade.')
    parser.add_argument('-l', '--lp-path', type=str, default=None,
                        help='File to write LP constraints to.')
    args = parser.parse_args()
    # Fix: the original used Python-2-only `print` statements; the
    # single-argument `print(...)` form below behaves identically on
    # Python 2 and Python 3.
    print("Loading cascade...")
    cascade = Cascade.load(args.cascade)
    docloud_context = DOcloudContext.make_default_context(DOCLOUD_URL)
    docloud_context.print_information()
    env = Environment()
    env.print_information()
    print("Solving...")
    im = inverse_haar(cascade,
                      min_optimize=(args.optimize == "min"),
                      max_optimize=(args.optimize == "max"),
                      time_limit=args.time_limit,
                      docloud_context=docloud_context,
                      lp_path=args.lp_path)
    cv2.imwrite(args.output, im)
    print("Wrote {}".format(args.output))
    if args.check:
        # Run the forward cascade over the generated image as a sanity check.
        print("Checking...")
        ret = cascade.detect(im)
        if ret != 1:
            print("Image failed the forward cascade at stage {}".format(-ret))
| 36.070513 | 83 | 0.571708 | 16,661 | 0.740226 | 0 | 0 | 2,595 | 0.115292 | 0 | 0 | 9,010 | 0.400302 |
5b41dca0dc7f5a816592454fec9d09eb00293d8b | 199 | py | Python | users/urls.py | a-vek/news-aggregator | d1aafbf7f2eed365ba734912e494af5c92728379 | [
"MIT"
] | null | null | null | users/urls.py | a-vek/news-aggregator | d1aafbf7f2eed365ba734912e494af5c92728379 | [
"MIT"
] | null | null | null | users/urls.py | a-vek/news-aggregator | d1aafbf7f2eed365ba734912e494af5c92728379 | [
"MIT"
] | null | null | null | from django.urls import path
from news.views import scrape, news_list
from . import views
urlpatterns = [
# path('', views.index, name="home"),
# path('newslist', news_list, name="home"),
]
| 22.111111 | 47 | 0.678392 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.40201 |
5b42b16965f1e6f2275117ba1529d214339f949e | 4,735 | py | Python | communication/socket_util.py | compix/MetadataManagerCore | 59ff0bd908d69c624834d72e64f4e0bff01f2a49 | [
"MIT"
] | null | null | null | communication/socket_util.py | compix/MetadataManagerCore | 59ff0bd908d69c624834d72e64f4e0bff01f2a49 | [
"MIT"
] | null | null | null | communication/socket_util.py | compix/MetadataManagerCore | 59ff0bd908d69c624834d72e64f4e0bff01f2a49 | [
"MIT"
] | null | null | null | import socket
import json
import struct
from concurrent.futures import ThreadPoolExecutor
import time
import logging
logger = logging.getLogger(__name__)
def readBlob(sock, size):
    """Read exactly `size` bytes from `sock`, in chunks of up to 2048 bytes.

    Raises RuntimeError if the peer closes the connection before `size`
    bytes arrive.
    """
    received = []
    total = 0
    while total < size:
        part = sock.recv(min(size - total, 2048))
        if part == b'':
            raise RuntimeError("Socket closed.")
        received.append(part)
        total += len(part)
    return b''.join(received)
def readSize(sock):
    """Read a native unsigned-long length prefix from `sock` and return it."""
    prefix_len = struct.calcsize("L")
    raw = readBlob(sock, prefix_len)
    (size,) = struct.unpack("L", raw)
    return size
def recvDict(sock):
    """Receive a length-prefixed UTF-8 JSON document from `sock` as a dict.

    The peer is expected to send a native unsigned-long byte count followed
    by that many bytes of JSON (the format produced by `sendDict`).
    """
    size = readSize(sock)
    jsonBlob = readBlob(sock, size)
    # Fix: json.loads's `encoding` keyword was deprecated and removed in
    # Python 3.9; json.loads decodes bytes as UTF-8 by default anyway.
    jdict = json.loads(jsonBlob)
    return jdict
def sendDict(sock, theDict : dict):
    """Serialize `theDict` as UTF-8 JSON and send it over `sock`, preceded
    by its byte length packed as a native unsigned long."""
    payload = json.dumps(theDict).encode(encoding="utf-8")
    header = struct.pack("L", len(payload))
    sock.sendall(header)
    sock.sendall(payload)
class JsonSocket(object):
    """TCP socket wrapper exchanging length-prefixed JSON dictionaries.

    Can act as a client (connectClient*) or as a threaded server
    (connectServer + runServer).
    """
    def __init__(self, timeout=None):
        super().__init__()
        # NOTE(review): the annotation names the `socket` module, not the
        # `socket.socket` class.
        self.sock : socket.socket = None
        self.running = True
        self.timeout = timeout
    def connectClient(self, port, host = None):
        """
        Tries to connect once. Raises an exception if the connection fails.
        If the host is None, socket.gethostname() is used.
        """
        if host == None:
            host = socket.gethostname()
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((host, port))
        self.sock.settimeout(self.timeout)
    def connectClientInsistently(self, port, host = None):
        """
        Tries to repeatedly connect until a connection is established.
        If the connection times out a socket.timeout exception is raised.
        If the host is None, socket.gethostname() is used.
        Note: The alternative "socket.create_connection((host, port))" is very slow.
        """
        if host == None:
            host = socket.gethostname()
        connected = False
        tStart = time.time()
        # Busy-retry until connected or the (optional) timeout elapses.
        while not connected and (self.timeout == None or (time.time() - tStart) < self.timeout):
            try:
                self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self.sock.connect((host, port))
                self.sock.settimeout(self.timeout)
                connected = True
            except:
                # NOTE(review): bare except deliberately swallows connection
                # errors so the loop can retry, but it also hides e.g.
                # KeyboardInterrupt.
                pass
        if not connected:
            raise socket.timeout(f"Could not connect to ({host},{port}). Timeout.")
    def connectServer(self, port, host = None, numConnections=1):
        """Bind and listen on (host, port); must be called before runServer."""
        self.numConnections = numConnections
        if host == None:
            host = socket.gethostname()
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.settimeout(self.timeout)
        self.sock.bind((host, port))
        self.sock.listen(numConnections)
    def runServer(self):
        """
        Accept loop: each accepted client is handled on a worker thread.
        Runs until `self.running` is cleared (see close()).
        """
        with ThreadPoolExecutor(max_workers=self.numConnections) as executor:
            while self.running:
                clientSocket, address = self.sock.accept()
                executor.submit(self.processClientSocket, clientSocket, address)
    def handleClientSocket(self, clientSocket : socket.socket, address, dataDictionary : dict):
        """
        Executed on a different thread. This function is meant to be overriden.
        The client socket will be closed automatically after the end of this function.
        """
        print(f"Handling client socket with address {address}")
        print(dataDictionary)
    def processClientSocket(self, clientSocket, address):
        # Receive one JSON dict, dispatch to the handler, then close.
        dataDictionary = recvDict(clientSocket)
        self.handleClientSocket(clientSocket, address, dataDictionary)
        clientSocket.close()
    def close(self):
        """Stop the accept loop (if any) and close the underlying socket."""
        self.running = False
        self.sock.close()
if __name__ == "__main__":
    # Manual smoke test: start a server on a worker thread, then exercise it
    # with one malformed message and one valid JSON dict.
    serverSocket = JsonSocket()
    serverSocket.connectServer(5000)
    def runServer():
        # Small delay so the main thread can start connecting first.
        time.sleep(1)
        serverSocket.runServer()
    g_executor = ThreadPoolExecutor(max_workers=1)
    g_executor.submit(runServer)
    # First client: sends raw bytes that are not a valid length-prefixed dict.
    g_clientSocket = JsonSocket(timeout=10.0)
    g_clientSocket.connectClientInsistently(port=5000)
    testDict = {"Key":"Val", "Key2":"Val2"}
    #sendDict(g_clientSocket.sock, testDict)
    g_clientSocket.sock.sendall(b'Invalid message.')
    g_clientSocket.close()
    # Second client: sends a well-formed dict via sendDict.
    g_clientSocket = JsonSocket(timeout=10.0)
    g_clientSocket.connectClientInsistently(port=5000)
    testDict = {"Key3":"Val3", "Key4":"Val4"}
    sendDict(g_clientSocket.sock, testDict)
    g_clientSocket.close()
    # Give the server time to process before shutting everything down.
    time.sleep(2)
    serverSocket.close()
    g_executor.shutdown()
5b4477c7ff32f21e1eedcba6eff6f119beced119 | 1,120 | py | Python | src/api/migrations/0003_auto_20190604_1058.py | pradipta/back-end | 05895b051afc4c8e0cb17db708063d80102e9de5 | [
"MIT"
] | 17 | 2019-05-11T22:15:34.000Z | 2022-03-26T22:45:33.000Z | src/api/migrations/0003_auto_20190604_1058.py | pradipta/back-end | 05895b051afc4c8e0cb17db708063d80102e9de5 | [
"MIT"
] | 390 | 2019-05-23T10:48:57.000Z | 2021-12-17T21:01:43.000Z | src/api/migrations/0003_auto_20190604_1058.py | pradipta/back-end | 05895b051afc4c8e0cb17db708063d80102e9de5 | [
"MIT"
] | 40 | 2019-05-21T14:41:57.000Z | 2021-01-30T13:39:38.000Z | # Generated by Django 2.2.2 on 2019-06-04 15:58
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated Django migration that drops a batch of legacy models
    from the api app's schema."""
    # Must be applied after migration 0002 of the api app.
    dependencies = [("api", "0002_auto_20190522_1618")]
    operations = [
        migrations.DeleteModel(name="ActiveAdminComment"),
        migrations.DeleteModel(name="AdminUser"),
        migrations.DeleteModel(name="ArInternalMetadata"),
        migrations.DeleteModel(name="Event"),
        migrations.DeleteModel(name="GitHubStatistic"),
        migrations.DeleteModel(name="GitHubUser"),
        migrations.DeleteModel(name="OldCodeSchool"),
        migrations.DeleteModel(name="OldLocation"),
        migrations.DeleteModel(name="OldTeamMember"),
        migrations.DeleteModel(name="Request"),
        migrations.DeleteModel(name="Resource"),
        migrations.DeleteModel(name="Role"),
        migrations.DeleteModel(name="SchemaMigration"),
        migrations.DeleteModel(name="Service"),
        migrations.DeleteModel(name="SlackUser"),
        migrations.DeleteModel(name="Tag"),
        migrations.DeleteModel(name="Tagging"),
        migrations.DeleteModel(name="Vote"),
    ]
| 37.333333 | 58 | 0.686607 | 1,035 | 0.924107 | 0 | 0 | 0 | 0 | 0 | 0 | 289 | 0.258036 |
5b458116a42dd7d10c7864067436041dfcef5bf1 | 1,226 | py | Python | Software 1/Practical/Week 09/Practical 14/vector.py | KristoffLiu/YorkCSSolution | cae76529b1e5956a59ac4bd51797a30a5a23fb8d | [
"Apache-2.0"
] | 3 | 2019-11-14T22:04:32.000Z | 2019-11-17T16:07:58.000Z | Software 1/Practical/Week 09/Practical 14/vector.py | KristoffLiu/YorkCSSolution | cae76529b1e5956a59ac4bd51797a30a5a23fb8d | [
"Apache-2.0"
] | null | null | null | Software 1/Practical/Week 09/Practical 14/vector.py | KristoffLiu/YorkCSSolution | cae76529b1e5956a59ac4bd51797a30a5a23fb8d | [
"Apache-2.0"
] | 1 | 2019-11-15T07:52:14.000Z | 2019-11-15T07:52:14.000Z | class Vector:
#exercise 01
def __init__(self,inputlist):
self._vector = []
_vector = inputlist
#exercise 02
def __str__(self):
return "<" + str(self._vector).strip("[]") + ">"
#exercise 03
def dim(self):
return len(self._vector)
#exercise 04
def get(self,index):
return self._vector[index]
def set(self,index,value):
self._vector[index] = value
def scalar_product(self, scalar):
return [scalar * x for x in self._vector]
#exercise 05
def add(self, other_vector):
if not isinstance(other_vector) == True and type(other_vector) == Vector:
raise TypeError
elif not self.dim() == other_vector.dim():
raise ValueError
else:
return self.scalar_product(other_vector)
#exercise 06
def equals(self,other_vector):
if not self.dim() == other_vector.dim():
return False
elif self == other_vector:
return True
else:
for i in range(self.dim()):
if self._vector[i] != other_vector._vector[i]:
return False
else:
return True
| 25.541667 | 81 | 0.551387 | 1,219 | 0.99429 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.066884 |
5b45b7ebc3202d65284b774cf188919577862056 | 370 | py | Python | Stomp/Utils/util.py | phan91/STOMP_agilis | c47394e7b58809fdd3eba539b6fa610eb8b476ce | [
"MIT"
] | null | null | null | Stomp/Utils/util.py | phan91/STOMP_agilis | c47394e7b58809fdd3eba539b6fa610eb8b476ce | [
"MIT"
] | null | null | null | Stomp/Utils/util.py | phan91/STOMP_agilis | c47394e7b58809fdd3eba539b6fa610eb8b476ce | [
"MIT"
] | null | null | null | import re
def replace_all(repls, str):
    """
    Applies replacements as described in the repls dictionary on input str.
    All keys are matched literally (regex metacharacters are escaped) in a
    single pass over the input.
    :param repls: Dictionary of replacements
    :param str: The string to be changed
    :return: The changed string
    """
    pattern = '|'.join(re.escape(key) for key in repls)
    return re.sub(pattern, lambda match: repls[match.group(0)], str)
5b46be0de52122650ce7b53028183e21f425fa14 | 185 | py | Python | ex048.py | CarlosEduardoAS/Python-exercicios | c0063660191a86e83f25708239b62b6764a51670 | [
"MIT"
] | null | null | null | ex048.py | CarlosEduardoAS/Python-exercicios | c0063660191a86e83f25708239b62b6764a51670 | [
"MIT"
] | null | null | null | ex048.py | CarlosEduardoAS/Python-exercicios | c0063660191a86e83f25708239b62b6764a51670 | [
"MIT"
] | null | null | null | s = 0
cont = 0
for c in range(1, 501, 2):
if c % 3 == 0:
cont += 1
s += c
print('A soma ente todos os {} ímpares múltiplos de 3 entre 1 e 500 é {}.'.format(cont, s)) | 26.428571 | 91 | 0.524324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.37766 |
5b46cfa3134f9db13338c6b633fbe8af7e2e0a25 | 4,153 | py | Python | source/casual/make/compiler/gcc.py | casualcore/casual-make | 935392bc54d2e1fb86a20406f668774de63a7cb1 | [
"MIT"
] | null | null | null | source/casual/make/compiler/gcc.py | casualcore/casual-make | 935392bc54d2e1fb86a20406f668774de63a7cb1 | [
"MIT"
] | null | null | null | source/casual/make/compiler/gcc.py | casualcore/casual-make | 935392bc54d2e1fb86a20406f668774de63a7cb1 | [
"MIT"
] | null | null | null | import platform
import casual.make.platform.common as common
import casual.make.tools.environment as environment
# C++ compiler command resolved by the shared platform helpers; COMPILER is
# an alias used by build_configuration below.
CXX = common.cxx()
COMPILER = CXX
def warning_directive():
    """Return the default gcc warning flags used when compiling casual."""
    return [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-Wsign-compare",
        "-Wuninitialized",
        "-Winit-self",
        "-Wno-unused-parameter",
        "-Wno-missing-declarations",
        "-Wno-noexcept-type",
        "-Wno-implicit-fallthrough",
    ]
# Extra compile flags and build metadata resolved by the shared helpers.
OPTIONAL_FLAGS = common.optional_flags()
VERSION_DIRECTIVE = common.casual_build_version()
GITHASH_DIRECTIVE = common.casual_build_commit_hash()
# Linkers
LIBRARY_LINKER = CXX
EXECUTABLE_LINKER = common.executable_linker()
ARCHIVE_LINKER = common.archive_linker()
STD_DIRECTIVE = common.cpp_standard()
# lint stuff
LINT_COMMAND = common.lint_command()
LINT_PRE_DIRECTIVES = common.lint_pre_directives()
OPTIONAL_POSSIBLE_FLAGS = common.optional_possible_flags()
# how can we get immediate binding like -Wl,-z,now ?
GENERAL_LINK_DIRECTIVE = ["-fPIC"]
def compile_directives(type_of_build, warning_directive):
    """Assemble the compiler flag list for the given build type.

    :param type_of_build: 'debug', 'analyze' or anything else (release).
    :param warning_directive: list of warning flags to include.
    :return: list of compiler flags.
    """
    configuration = VERSION_DIRECTIVE + GITHASH_DIRECTIVE + warning_directive + \
        STD_DIRECTIVE + OPTIONAL_FLAGS + OPTIONAL_POSSIBLE_FLAGS
    if type_of_build in ['debug', 'analyze']:
        # Fix: the original had a platform.system() == 'Darwin' conditional
        # whose two branches were identical — the flags are the same on
        # every platform, so the dead conditional was removed.
        configuration += ["-ggdb", "-c", "-fPIC"]
        if type_of_build == 'analyze':
            # Coverage instrumentation for analyze builds.
            configuration += ["-fprofile-arcs", "-ftest-coverage"]
    else:
        configuration += ["-c", "-O3", "-fPIC", "-pthread"]
    return configuration
def link_directives_lib(type_of_build, warning_directive):
    """Assemble the linker flags for building a shared library."""
    configuration = GENERAL_LINK_DIRECTIVE + warning_directive + STD_DIRECTIVE
    darwin = platform.system() == 'Darwin'
    if type_of_build in ['debug', 'analyze']:
        if darwin:
            configuration += ["-ggdb", "-dynamiclib"]
        else:
            configuration += ["-g", "-pthread", "-shared", "-fpic"]
        if type_of_build == 'analyze':
            # Coverage link flags differ between macOS and Linux toolchains.
            if darwin:
                configuration += ["-fprofile-arcs"]
            else:
                configuration += ["-O0", "-coverage"]
    else:
        if darwin:
            configuration += ["-dynamiclib", "-O3"]
        else:
            configuration += ["-pthread", "-shared", "-O3", "-fpic"]
    return configuration
def link_directives_exe(type_of_build, warning_directive):
    """Assemble the linker flags for building an executable."""
    configuration = GENERAL_LINK_DIRECTIVE + warning_directive + STD_DIRECTIVE
    darwin = platform.system() == 'Darwin'
    if type_of_build in ['debug', 'analyze']:
        if darwin:
            configuration += ["-ggdb"]
        else:
            configuration += ["-g", "-pthread", "-fpic"]
        if type_of_build == 'analyze':
            # Coverage link flags differ between macOS and Linux toolchains.
            if darwin:
                configuration += ["-lgcov", "-fprofile-arcs"]
            else:
                configuration += ["-O0", "-coverage"]
    else:
        if darwin:
            configuration += ["-O3"]
        else:
            configuration += ["-pthread", "-O3", "-fpic"]
    return configuration
def link_directives_archive(type_of_build, warning_directive):
    """Assemble the flags used when creating a static archive.

    Note: unlike the other link helpers, the result deliberately excludes
    GENERAL_LINK_DIRECTIVE and the warning flags — the original assigned
    them and then unconditionally overwrote `configuration` in every
    branch, so that dead initial assignment has been removed.

    :param warning_directive: unused, kept for signature parity with the
        other link_directives_* helpers.
    """
    if type_of_build in ['debug', 'analyze']:
        return ["-ggdb"] if platform.system() == 'Darwin' else ["-g"]
    return ["-O3", "-pthread"] if platform.system() == 'Darwin' else []
def build_configuration(type_of_build="normal", warning_directive=warning_directive()):
    """Collect the compiler/linker commands and flag lists for a build type.

    Note: the default warning list is evaluated once, at definition time,
    exactly as in the original.
    """
    return {
        "compiler": COMPILER,
        "header_dependency_command": COMPILER + ["-E", "-MMD"] + STD_DIRECTIVE,
        "library_linker": LIBRARY_LINKER,
        "executable_linker": EXECUTABLE_LINKER,
        "archive_linker": ARCHIVE_LINKER,
        "compile_directives": compile_directives(type_of_build, warning_directive),
        "link_directives_lib": link_directives_lib(type_of_build, warning_directive),
        "link_directives_exe": link_directives_exe(type_of_build, warning_directive),
        "link_directives_archive": link_directives_archive(type_of_build, warning_directive),
    }
| 30.762963 | 99 | 0.650614 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 950 | 0.22875 |
5b46e230e630204c976c8ffe3cfb3b96c0f2ab66 | 3,914 | py | Python | modificar_pacientes.py | Ratius9919/TP-programacion | bbe7040333b0319ca609fa61147519ee9c6434b0 | [
"Apache-2.0"
] | null | null | null | modificar_pacientes.py | Ratius9919/TP-programacion | bbe7040333b0319ca609fa61147519ee9c6434b0 | [
"Apache-2.0"
] | null | null | null | modificar_pacientes.py | Ratius9919/TP-programacion | bbe7040333b0319ca609fa61147519ee9c6434b0 | [
"Apache-2.0"
] | null | null | null | import os
def crear_dni():  # Prompt for a value and return it once validated as a DNI.
    """Keep prompting on the console until a numeric DNI in the valid
    range (1,000,000 .. 200,000,000 exclusive) is entered, then return it
    as a string."""
    while True:
        try:
            dni = input("[?] Por favor, ingrese su DNI: ")
            if 1000000 < int(dni) < 200000000:
                return dni
        except ValueError:
            # Non-numeric input: warn and ask again.
            print("[!] Ingresaste un valor no valido.")
def escribir_archivo(lista):
    """Append one record (its str() form plus a newline) to Pacientes.txt
    in the current working directory."""
    with open("Pacientes.txt", "a") as pacientes:
        pacientes.write("{}\n".format(lista))
def leer_archivo():
    """Return the full contents of Pacientes.txt as a string."""
    # Fix: use a context manager so the handle is closed even if read()
    # raises (the original opened/closed the file manually).
    with open("Pacientes.txt", "r") as f:
        return f.read()
def borrar_archivo():
    """Empty Pacientes.txt (creating it if it does not exist)."""
    # Fix: opening in "w" mode truncates the file directly; the original
    # opened in append mode and called truncate(0), which is equivalent
    # but roundabout.
    with open("Pacientes.txt", "w"):
        pass
def lista():
    """Return the contents of Pacientes.txt split into one string per line."""
    contenido = leer_archivo()
    return contenido.split("\n")
def buscar_dni(dni):
    """Return the index of the first patient record containing `dni`,
    or -1 when no record matches.

    Fix: the original returned -1 as soon as the *first* record failed to
    match (an `elif` inside the loop), so any match beyond index 0 was
    unreachable. -1 is now returned only after every record was checked.
    """
    lista_pacientes = lista()
    for i in range(len(lista_pacientes)):
        if lista_pacientes[i].find(dni) != -1:
            return i
    return -1
def modificar_paciente(dni):
    """Interactively edit the record matching `dni`.

    Returns -1 when the DNI is not found, otherwise the edited record as a
    list of fields [dni, nombre, edad]. When the user answers -1 ("Nada")
    the record is returned unchanged.
    """
    ubicacion = buscar_dni(dni)
    if ubicacion == -1:
        return -1
    lista_pacientes = lista()
    # Records are semicolon-separated fields: dni;nombre;edad.
    lista_paciente= lista_pacientes[ubicacion].split(";")
    pregunta = int(input("[?] Que desea cambiar? [1] El DNI. [2] El nombre. [3] la edad. [0] Todos. [-1] Nada."))
    if pregunta == 1:
        # NOTE(review): this reassigns lista_pacientes to the popped element;
        # the variable is not used afterwards, so it has no visible effect.
        lista_pacientes = lista_pacientes.pop(ubicacion)
        lista_paciente[pregunta-1] = crear_dni()
    if pregunta == 2:
        lista_pacientes = lista_pacientes.pop(ubicacion)
        nombre = input("[?] Ingrese su nombre por favor: ")
        # Reject purely numeric names.
        while nombre.isdigit():
            nombre = input("[?] Ingrese su nombre por favor, solo letras: ")
        lista_paciente[pregunta-1] = nombre
    if pregunta == 3:
        lista_pacientes = lista_pacientes.pop(ubicacion)
        edad = input("[?] Edad: ")
        # Age must be numeric and at least 18.
        while edad.isdigit() == False:
            edad = input("[?] Ingrese su edad nuevamente. Solo numeros: ")
        while int(edad) <= 17:
            print("[!] Error, no se le puede inscribir si es menor de 17.")
            edad = input("[?] Ingrese su edad nuevamente: ")
        lista_paciente[pregunta-1] = edad
    if pregunta == 0:
        # Re-enter every field: DNI, then name, then age.
        lista_pacientes = lista_pacientes.pop(ubicacion)
        lista_paciente[0] = crear_dni()
        nombre = input("[?] Ingrese su nombre por favor: ")
        while nombre.isdigit():
            nombre = input("[?] Ingrese su nombre por favor, solo letras: ")
        lista_paciente[1] = nombre
        edad = input("[?] Edad: ")
        while edad.isdigit() == False:
            edad = input("[?] Ingrese su edad nuevamente. Solo numeros: ")
        while int(edad) <= 17:
            print("[!] Error, no se le puede inscribir si es menor de 17.")
            edad = input("[?] Ingrese su edad nuevamente: ")
        lista_paciente[2] = edad
    return lista_paciente
def modificar_lista(dni):
    """Return the full record list with the record for `dni` replaced by
    the interactively edited version, or None when the DNI is not found.
    """
    valor = modificar_paciente(dni)
    if valor == -1:
        print("[!] No se ha encontrado el DNI, volviendo a la pantalla anterior.")
        return
    lista_original = lista()
    ubicacion = buscar_dni(dni)
    # Swap the old record for the edited one at the same position.
    lista_original.pop(ubicacion)
    lista_original.insert(ubicacion, valor)
    # Fix: the original built a string copy of the list and called
    # str.replace() three times without using the results (strings are
    # immutable), then returned the list anyway — that dead code was
    # removed.
    return lista_original
def main(dni):
    """Run the full update flow for `dni`: edit the record, then rewrite
    Pacientes.txt with the resulting list. Does nothing when the DNI is
    not present."""
    if buscar_dni(dni) == -1:
        return
    # `registros` avoids shadowing the module-level lista() function.
    registros = modificar_lista(dni)
    borrar_archivo()
    escribir_archivo(registros)
# Script entry point: ask which DNI to modify, then run the update flow.
dni = input("¿Cual es el DNI que desea modificar?")
main(dni)
5b47c020381d563cd22c6d5f79397ba250b6cf19 | 266 | py | Python | solutions/pybasic_ex1_4_1.py | mfernandes61/python-basic | fc105db0e2e3af1830d833eef368fd011b4fcf5e | [
"Unlicense"
] | 40 | 2017-02-22T05:35:16.000Z | 2022-01-10T09:28:35.000Z | solutions/pybasic_ex1_4_1.py | mfernandes61/python-basic | fc105db0e2e3af1830d833eef368fd011b4fcf5e | [
"Unlicense"
] | 10 | 2016-12-12T17:06:50.000Z | 2019-06-14T13:34:22.000Z | solutions/pybasic_ex1_4_1.py | mfernandes61/python-basic | fc105db0e2e3af1830d833eef368fd011b4fcf5e | [
"Unlicense"
] | 47 | 2017-03-24T19:48:50.000Z | 2022-02-27T15:53:09.000Z | # Protein sequence given
seq = "MPISEPTFFEIF"
# Split the sequence into its component amino acids
seq_list = list(seq)
# Use a set to establish the unique amino acids
unique_amino_acids = set(seq_list)
# Print out the unique amino acids
print(unique_amino_acids)
| 22.166667 | 51 | 0.781955 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 170 | 0.639098 |
5b47c29aaab3cdf3c4e1671a310113a2c0e1ba3f | 486 | py | Python | campy/private/backends/backend_interactive_console.py | TristenSeth/campy | 9e726c342d682239e1c19e6f5645c0b2167d7fab | [
"MIT"
] | null | null | null | campy/private/backends/backend_interactive_console.py | TristenSeth/campy | 9e726c342d682239e1c19e6f5645c0b2167d7fab | [
"MIT"
] | null | null | null | campy/private/backends/backend_interactive_console.py | TristenSeth/campy | 9e726c342d682239e1c19e6f5645c0b2167d7fab | [
"MIT"
] | null | null | null | from campy.private.backend_base import ConsoleBackendBase
class InteractiveConsoleBackend(ConsoleBackendBase):
    """Console backend that uses the interactive terminal via the builtin
    input()/print() functions."""
    def clear_console(self):
        # No-op: this backend does not clear the interactive terminal.
        pass
    def set_console_font(self, font):
        # No-op: fonts cannot be changed on a plain terminal.
        pass
    def set_console_size(self, console_size):
        # No-op: the terminal size is not controlled by this backend.
        pass
    def get_console_line(self):
        """Read one line from standard input."""
        return input()
    def put_console(self, line, stderr=False):
        """Write a line to the console.

        NOTE(review): the `stderr` flag is currently ignored — output always
        goes to stdout via print().
        """
        print(line)
    # def echo_console(self):
    #     pass
    # def end_line_console(self):
    #     pass
| 20.25 | 57 | 0.658436 | 426 | 0.876543 | 0 | 0 | 0 | 0 | 0 | 0 | 74 | 0.152263 |
5b485cb1cc265a14ca9183a9c1f8a057dc3e6b2f | 2,608 | py | Python | apps/examples/simple-example/test/integration/test_it_collect_spawn.py | pcanto-hopeit/hopeit.engine | c17b0438e56940a4d1b2f071cca90ae8b6f70629 | [
"Apache-2.0"
] | 15 | 2020-07-09T17:41:14.000Z | 2021-10-04T20:13:08.000Z | apps/examples/simple-example/test/integration/test_it_collect_spawn.py | pcanto-hopeit/hopeit.engine | c17b0438e56940a4d1b2f071cca90ae8b6f70629 | [
"Apache-2.0"
] | 48 | 2020-07-10T15:16:17.000Z | 2022-03-03T19:46:46.000Z | apps/examples/simple-example/test/integration/test_it_collect_spawn.py | pcanto-hopeit/hopeit.engine | c17b0438e56940a4d1b2f071cca90ae8b6f70629 | [
"Apache-2.0"
] | 3 | 2020-07-08T20:12:58.000Z | 2021-01-10T15:57:21.000Z | import os
import uuid
import pytest # type: ignore
from hopeit.testing.apps import execute_event
from hopeit.server.version import APPS_API_VERSION
from model import Something
from simple_example.collector.collect_spawn import ItemsInfo, ItemsCollected
# Dots are not valid in the fs plugin's path segment, so version 0.1.0
# becomes 0x1x0 in the data directory name.
APP_VERSION = APPS_API_VERSION.replace('.', "x")
@pytest.fixture
def sample_file_ids():
    """Write two Something records as JSON files into the fs data path and
    return their generated ids."""
    ids = [str(uuid.uuid4()), str(uuid.uuid4())]
    for test_id in ids:
        json_str = '{"id": "' + test_id + '", "user": {"id": "u1", "name": "test_user"}, ' \
            + '"status": {"ts": "2020-05-01T00:00:00Z", "type": "NEW"}, "history": []}'
        os.makedirs(f'/tmp/simple_example.{APP_VERSION}.fs.data_path/', exist_ok=True)
        with open(f'/tmp/simple_example.{APP_VERSION}.fs.data_path/{test_id}.json', 'w') as f:
            f.write(json_str)
            f.flush()
    return ids
@pytest.mark.asyncio
async def test_find_two_items(app_config, sample_file_ids):  # noqa: F811
    """Both requested ids exist: the collector returns a list of two items
    and the postprocess hook reports a count of 2."""
    payload = ItemsInfo(*sample_file_ids)
    result, pp_result, response = await execute_event(app_config=app_config,
                                                      event_name='collector.collect_spawn',
                                                      payload=payload,
                                                      postprocess=True)
    assert isinstance(result, list)
    assert len(result) == 2
    assert result[0].id == sample_file_ids[0]
    assert result[1].id == sample_file_ids[1]
    assert pp_result == 2
@pytest.mark.asyncio
async def test_find_one_item(app_config, sample_file_ids):  # noqa: F811
    """Only one of the two ids exists: a single Something object is
    returned (not a list) and the postprocess count is 1."""
    payload = ItemsInfo(sample_file_ids[0], str(uuid.uuid4()))
    result, pp_result, response = await execute_event(app_config=app_config,
                                                      event_name='collector.collect_spawn',
                                                      payload=payload,
                                                      postprocess=True)
    assert isinstance(result, Something)
    assert result.id == sample_file_ids[0]
    assert pp_result == 1
@pytest.mark.asyncio
async def test_find_no_items(app_config, sample_file_ids):  # noqa: F811
    """Neither id exists: an empty ItemsCollected is returned and the
    postprocess count is 0."""
    payload = ItemsInfo(str(uuid.uuid4()), str(uuid.uuid4()))
    result, pp_result, response = await execute_event(app_config=app_config,
                                                      event_name='collector.collect_spawn',
                                                      payload=payload,
                                                      postprocess=True)
    assert result == ItemsCollected([])
    assert pp_result == 0
| 41.396825 | 94 | 0.577071 | 0 | 0 | 0 | 0 | 2,289 | 0.877684 | 1,678 | 0.643405 | 379 | 0.145322 |
5b487a8b99ccb82c0f1a7123624b4438aa8ebb71 | 1,767 | py | Python | app.py | ruturajshete1008/Heart-health-prediction | 75fe4232f19014ed2b6fb2463c815ba023ad7a0d | [
"MIT"
] | 3 | 2018-11-28T04:30:15.000Z | 2019-01-02T09:13:03.000Z | app.py | ruturajshete1008/Heart-health-prediction | 75fe4232f19014ed2b6fb2463c815ba023ad7a0d | [
"MIT"
] | null | null | null | app.py | ruturajshete1008/Heart-health-prediction | 75fe4232f19014ed2b6fb2463c815ba023ad7a0d | [
"MIT"
] | null | null | null | from flask import Flask, jsonify, render_template
import pandas as pd
import os
import pymongo
from flask import send_from_directory
from pymongo import MongoClient
# initialize flask app
app = Flask(__name__)
# Preserve insertion order of keys in jsonify() responses.
app.config['JSON_SORT_KEYS'] = False

# read the data and merge it
df_labels = pd.read_csv('train_labels.csv')
df_values = pd.read_csv('train_values.csv')
# Inner join on patient_id: keep only patients present in both files.
merged_df = pd.merge(df_values, df_labels, how='inner', on='patient_id')

# filter dataframe for with and w/o HD
# merged_df_1 keeps rows with heart_disease_present == 1 (drops the 0s);
# merged_df_0 keeps rows with heart_disease_present == 0 (drops the 1s).
merged_df_1 = merged_df.drop(merged_df.index[(merged_df.heart_disease_present.eq(0))])
merged_df_0 = merged_df.drop(merged_df.index[(merged_df.heart_disease_present.eq(1))])

# Use the hosted MongoDB when MONGODB_URI is set, else a local instance.
conn = os.environ.get('MONGODB_URI')
if not conn:
    conn = 'mongodb://localhost:27017/'
client = MongoClient(conn)
db = client.heart_data
collection = db.train_values

# Materialize the collection once at import time; "_id" is dropped because
# a raw ObjectId is not JSON-serializable.
listt = []
for obj in collection.find():
    obj.pop("_id")
    listt.append(obj)
# build out the routes
@app.route('/')
def home():
    """Landing page."""
    return render_template('index.html')
@app.route('/favicon.ico')
def favicon():
    """Serve the favicon from static/images."""
    return send_from_directory(os.path.join(app.root_path,'static','images'),
                               'favicon.ico', mimetype='image/png')
@app.route('/analysis')
def analysis():
    """Data-analysis page."""
    return render_template('analysis.html')
@app.route('/prediction')
def predict():
    """Heart-health prediction page."""
    return render_template('health-prediction.html')
@app.route('/data')
def data():
    """Raw-data page."""
    return render_template('data.html')
@app.route('/chart')
def chart():
    """Return patient ages split by heart-disease label as JSON for the chart."""
    # build a dictionary to jsonify into a route
    my_data = {"age_hd": list(merged_df_1['age']), "age_no_hd": list(merged_df_0['age'])}
    return jsonify(my_data)
@app.route('/table')
def tab_content():
    """Return the MongoDB documents cached at startup as JSON for the table."""
    return jsonify(listt)
if __name__ == '__main__':
    # Development server only; debug mode enables the reloader.
    app.run(debug=True)
| 26.772727 | 89 | 0.710243 | 0 | 0 | 0 | 0 | 773 | 0.437465 | 0 | 0 | 476 | 0.269383 |
5b48b34a9e7a2aa7d8ebb9fa3eabb138aabac88b | 15,103 | py | Python | tests/test_transform.py | braedon/kong-log-bridge | a2f7622f1ad77af93036b221243f556853c34343 | [
"MIT"
] | 2 | 2021-03-10T20:12:28.000Z | 2021-07-24T05:54:46.000Z | tests/test_transform.py | braedon/kong-log-bridge | a2f7622f1ad77af93036b221243f556853c34343 | [
"MIT"
] | null | null | null | tests/test_transform.py | braedon/kong-log-bridge | a2f7622f1ad77af93036b221243f556853c34343 | [
"MIT"
] | null | null | null | import unittest
from kong_log_bridge.transform import transform_log
class Test(unittest.TestCase):
    """Unit tests for ``transform_log``.

    Covers timestamp conversion, querystring-bool conversion, hashing of
    client IPs, auth headers and cookies, plus the generic ``hash_paths``
    and ``null_paths`` options.  Fixtures are realistic Kong access-log
    entries; hashed values in ``expected`` were captured from a known-good
    run, so they must not be edited.
    """
    # Fixtures are large nested dicts; show the full diff on failure.
    maxDiff = None

    def test_transform(self):
        """End-to-end transform of a full Kong log with every option enabled."""
        test_log = {
            "latencies": {
                "request": 191,
                "kong": 0,
                "proxy": 191
            },
            "service": {
                "host": "example.default.80.svc",
                "created_at": 1595260351,
                "connect_timeout": 60000,
                "id": "adc094b9-1359-5576-8973-5f5aac508101",
                "protocol": "http",
                "name": "example.default.80",
                "read_timeout": 60000,
                "port": 80,
                "path": "/",
                "updated_at": 1595260351,
                "write_timeout": 60000,
                "retries": 5
            },
            "request": {
                "querystring": {
                    "foo": "bar",
                    "baz": True
                },
                "size": "1430",
                "uri": "/login",
                "url": "https://example.com:8443/login",
                "headers": {
                    "host": "example.com",
                    "content-type": "application/x-www-form-urlencoded",
                    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
                    "authorization": "Bearer some_token",
                    "cookie": "__Host-example_login_csrf-zK9kT=some_login_csrf",
                    "upgrade-insecure-requests": "1",
                    "connection": "keep-alive",
                    "referer": "https://example.com/login?continue=https%3A%2F%2Fexample.com%2Foauth2%2Fauthorize%3Fresponse_type%3Dcode%26client_id%3Dexample_client%26scope%3Dopenid%26state%3Dp2DOUg5DvzyFFxE9D%26nonce%3DFjKXc-cZLMHf3ohZQ_HQZQ%26redirect_uri%3Dhttps%253A%252F%252Fexample.com%252Fapp%252Foidc%252Fcallback%26new_login%3Dtrue&client_id=example_client",
                    "accept-language": "en-US,en;q=0.5",
                    "user-agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:79.0) Gecko/20100101 Firefox/79.0",
                    "content-length": "478",
                    "origin": "https://example.com",
                    "dnt": "1",
                    "accept-encoding": "gzip, deflate, br"
                },
                "method": "POST"
            },
            "client_ip": "1.2.3.4",
            "tries": [
                {
                    "balancer_latency": 0,
                    "port": 8080,
                    "balancer_start": 1595326603251,
                    "ip": "10.244.1.139"
                }
            ],
            "upstream_uri": "/login",
            "response": {
                "headers": {
                    "content-type": "text/html; charset=UTF-8",
                    "connection": "close",
                    "referrer-policy": "no-referrer, strict-origin-when-cross-origin",
                    "expect-ct": "max-age=86400, enforce",
                    "strict-transport-security": "max-age=63072000; includeSubDomains; preload",
                    "x-xss-protection": "1; mode=block",
                    "x-kong-proxy-latency": "0",
                    "x-frame-options": "DENY",
                    "content-security-policy": "default-src 'none'; base-uri 'none'; form-action 'self'; frame-ancestors 'none'; block-all-mixed-content; img-src 'self'; script-src 'self'; style-src 'self'; font-src 'self'",
                    "content-length": "1252",
                    "feature-policy": "accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; battery 'none'; camera 'none'; display-capture 'none'; document-domain 'none'; encrypted-media 'none'; execution-while-not-rendered 'none'; execution-while-out-of-viewport 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; layout-animations 'none'; legacy-image-formats 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; navigation-override 'none'; oversized-images 'none'; payment 'none'; picture-in-picture 'none'; publickey-credentials 'none'; sync-xhr 'none'; usb 'none'; wake-lock 'none'; xr-spatial-tracking 'none'",
                    "via": "kong/2.0.2",
                    "set-cookie": [
                        "__Host-example_auth=some_auth; HttpOnly; Max-Age=86400; Path=/; SameSite=lax; Secure",
                        "__Host-example_csrf=some_csrf; HttpOnly; Max-Age=86400; Path=/; SameSite=strict; Secure"
                    ],
                    "x-kong-upstream-latency": "191",
                    "date": "Tue, 21 Jul 2020 10:16:44 GMT",
                    "x-content-type-options": "nosniff"
                },
                "status": 200,
                "size": "3552"
            },
            "route": {
                "created_at": 1595260351,
                "path_handling": "v0",
                "id": "b01758b0-be33-5274-adfd-e53704dc2e4c",
                "service": {
                    "id": "adc094b9-1359-5576-8973-5f5aac508101"
                },
                "name": "example.default.00",
                "strip_path": False,
                "preserve_host": True,
                "regex_priority": 0,
                "updated_at": 1595260351,
                "paths": [
                    "/"
                ],
                "https_redirect_status_code": 426,
                "protocols": [
                    "http",
                    "https"
                ],
                "hosts": [
                    "example.com"
                ]
            },
            "started_at": 1595326603250
        }
        # Same log after: epoch seconds/millis -> ISO-8601, querystring True -> "",
        # and client_ip / auth token / cookie values replaced by their hashes.
        expected = {
            "latencies": {
                "request": 191,
                "kong": 0,
                "proxy": 191
            },
            "service": {
                "host": "example.default.80.svc",
                "created_at": "2020-07-20T15:52:31+00:00",
                "connect_timeout": 60000,
                "id": "adc094b9-1359-5576-8973-5f5aac508101",
                "protocol": "http",
                "name": "example.default.80",
                "read_timeout": 60000,
                "port": 80,
                "path": "/",
                "updated_at": "2020-07-20T15:52:31+00:00",
                "write_timeout": 60000,
                "retries": 5
            },
            "request": {
                "querystring": {
                    "foo": "bar",
                    "baz": ""
                },
                "size": "1430",
                "uri": "/login",
                "url": "https://example.com:8443/login",
                "headers": {
                    "host": "example.com",
                    "content-type": "application/x-www-form-urlencoded",
                    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
                    "authorization": "Bearer 0Mmt7PwMgQ9Z7oYvP4ypoQ",
                    "cookie": "__Host-example_login_csrf-zK9kT=7xe0gvFR3iHPwx-B6ZIu8A",
                    "upgrade-insecure-requests": "1",
                    "connection": "keep-alive",
                    "referer": "https://example.com/login?continue=https%3A%2F%2Fexample.com%2Foauth2%2Fauthorize%3Fresponse_type%3Dcode%26client_id%3Dexample_client%26scope%3Dopenid%26state%3Dp2DOUg5DvzyFFxE9D%26nonce%3DFjKXc-cZLMHf3ohZQ_HQZQ%26redirect_uri%3Dhttps%253A%252F%252Fexample.com%252Fapp%252Foidc%252Fcallback%26new_login%3Dtrue&client_id=example_client",
                    "accept-language": "en-US,en;q=0.5",
                    "user-agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:79.0) Gecko/20100101 Firefox/79.0",
                    "content-length": "478",
                    "origin": "https://example.com",
                    "dnt": "1",
                    "accept-encoding": "gzip, deflate, br"
                },
                "method": "POST"
            },
            "client_ip": "Pk7QhG5N_LBhKQyqtwiOSQ",
            "tries": [
                {
                    "balancer_latency": 0,
                    "port": 8080,
                    "balancer_start": "2020-07-21T10:16:43+00:00",
                    "ip": "10.244.1.139"
                }
            ],
            "upstream_uri": "/login",
            "response": {
                "headers": {
                    "content-type": "text/html; charset=UTF-8",
                    "connection": "close",
                    "referrer-policy": "no-referrer, strict-origin-when-cross-origin",
                    "expect-ct": "max-age=86400, enforce",
                    "strict-transport-security": "max-age=63072000; includeSubDomains; preload",
                    "x-xss-protection": "1; mode=block",
                    "x-kong-proxy-latency": "0",
                    "x-frame-options": "DENY",
                    "content-security-policy": "default-src 'none'; base-uri 'none'; form-action 'self'; frame-ancestors 'none'; block-all-mixed-content; img-src 'self'; script-src 'self'; style-src 'self'; font-src 'self'",
                    "content-length": "1252",
                    "feature-policy": "accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; battery 'none'; camera 'none'; display-capture 'none'; document-domain 'none'; encrypted-media 'none'; execution-while-not-rendered 'none'; execution-while-out-of-viewport 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; layout-animations 'none'; legacy-image-formats 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; navigation-override 'none'; oversized-images 'none'; payment 'none'; picture-in-picture 'none'; publickey-credentials 'none'; sync-xhr 'none'; usb 'none'; wake-lock 'none'; xr-spatial-tracking 'none'",
                    "via": "kong/2.0.2",
                    "set-cookie": [
                        "__Host-example_auth=vsXTPw-wyNDQcioekyXCcw; HttpOnly; Max-Age=86400; Path=/; SameSite=lax; Secure",
                        "__Host-example_csrf=0-UmIYo1jhPDgnW5pHsEHw; HttpOnly; Max-Age=86400; Path=/; SameSite=strict; Secure"
                    ],
                    "x-kong-upstream-latency": "191",
                    "date": "Tue, 21 Jul 2020 10:16:44 GMT",
                    "x-content-type-options": "nosniff"
                },
                "status": 200,
                "size": "3552"
            },
            "route": {
                "created_at": "2020-07-20T15:52:31+00:00",
                "path_handling": "v0",
                "id": "b01758b0-be33-5274-adfd-e53704dc2e4c",
                "service": {
                    "id": "adc094b9-1359-5576-8973-5f5aac508101"
                },
                "name": "example.default.00",
                "strip_path": False,
                "preserve_host": True,
                "regex_priority": 0,
                "updated_at": "2020-07-20T15:52:31+00:00",
                "paths": [
                    "/"
                ],
                "https_redirect_status_code": 426,
                "protocols": [
                    "http",
                    "https"
                ],
                "hosts": [
                    "example.com"
                ]
            },
            "started_at": "2020-07-21T10:16:43+00:00"
        }
        result = transform_log(test_log,
                               do_convert_ts=True,
                               do_convert_qs_bools=True,
                               do_hash_ip=True,
                               do_hash_auth=True,
                               do_hash_cookie=True)
        self.assertEqual(expected, result)

    def test_transform_bad_auth(self):
        """An auth header without a scheme prefix is hashed whole."""
        test_log = {
            "request": {
                "headers": {
                    "authorization": "some_token",
                },
            },
        }
        expected = {
            "request": {
                "headers": {
                    "authorization": "0Mmt7PwMgQ9Z7oYvP4ypoQ",
                },
            },
        }
        result = transform_log(test_log,
                               do_hash_auth=True)
        self.assertEqual(expected, result)

    def test_transform_bad_cookie(self):
        """Cookie strings without a ``name=value`` shape are hashed whole."""
        test_log = {
            "request": {
                "headers": {
                    "cookie": "__Host-example_login_csrf-zK9kT-some_login_csrf",
                },
            },
            "response": {
                "headers": {
                    "set-cookie": [
                        "__Host-example_auth/some_auth; HttpOnly; Max-Age=86400; Path=/; SameSite=lax; Secure",
                        "__Host-example_csrf|some_csrf; HttpOnly; Max-Age=86400; Path=/; SameSite=strict; Secure"
                    ],
                },
            },
        }
        expected = {
            "request": {
                "headers": {
                    "cookie": "BPvPOrxZNo_DhGCLTtcO_A",
                },
            },
            "response": {
                "headers": {
                    "set-cookie": [
                        "ceNEbDKXcwmC6WjnoB3xNw; HttpOnly; Max-Age=86400; Path=/; SameSite=lax; Secure",
                        "AwdYctEnVuXiVepXBiXu-w; HttpOnly; Max-Age=86400; Path=/; SameSite=strict; Secure"
                    ],
                },
            },
        }
        result = transform_log(test_log,
                               do_hash_cookie=True)
        self.assertEqual(expected, result)

    def test_hash_paths(self):
        """``hash_paths`` hashes values of any type addressed by a path expression."""
        test_log = {
            'foo': [
                {'bar': 'a', 'baz': 'a'},
                {'bar': 'a', 'baz': 'b'},
                {'bar': 1, 'baz': 'c'},
                {'bar': 1.1, 'baz': 'd'},
                {'bar': ['a'], 'baz': 'e'},
                {'bar': {'a': 'b'}, 'baz': 'f'},
            ],
        }
        expected = {
            'foo': [
                {'bar': 'J8NebpNzh38p5WJGTkZJfg', 'baz': 'a'},
                {'bar': 'J8NebpNzh38p5WJGTkZJfg', 'baz': 'b'},
                {'bar': 'zqOHijNLJARp0Vn_hAtkNA', 'baz': 'c'},
                {'bar': 'oduRJWsoVhEUGjoLDP2igA', 'baz': 'd'},
                {'bar': '9TsxVbMCmC4Za3ZFt7YUsQ', 'baz': 'e'},
                {'bar': 'WJqukeZh_5Vhv1BN0Cam4Q', 'baz': 'f'},
            ],
        }
        result = transform_log(test_log,
                               hash_paths=['foo[].bar'])
        self.assertEqual(expected, result)

    def test_null_paths(self):
        """``null_paths`` replaces values of any type with ``None``."""
        test_log = {
            'foo': [
                {'bar': 'a', 'baz': 'a'},
                {'bar': 1, 'baz': 'b'},
                {'bar': 1.1, 'baz': 'c'},
                {'bar': ['a'], 'baz': 'd'},
                {'bar': {'a': 'b'}, 'baz': 'e'},
            ],
        }
        expected = {
            'foo': [
                {'bar': None, 'baz': 'a'},
                {'bar': None, 'baz': 'b'},
                {'bar': None, 'baz': 'c'},
                {'bar': None, 'baz': 'd'},
                {'bar': None, 'baz': 'e'},
            ],
        }
        result = transform_log(test_log,
                               null_paths=['foo[].bar'])
        self.assertEqual(expected, result)
| 44.683432 | 653 | 0.449315 | 15,031 | 0.995233 | 0 | 0 | 0 | 0 | 0 | 0 | 7,650 | 0.506522 |
5b48f5f2c39f9a6ca862b7d0c171aebace55201c | 655 | py | Python | tests/utils.py | ilyarogozin/homework_bot | 52bc4c028edb2a80082d4902fb35d09a3b98fc7c | [
"MIT"
] | null | null | null | tests/utils.py | ilyarogozin/homework_bot | 52bc4c028edb2a80082d4902fb35d09a3b98fc7c | [
"MIT"
] | null | null | null | tests/utils.py | ilyarogozin/homework_bot | 52bc4c028edb2a80082d4902fb35d09a3b98fc7c | [
"MIT"
] | null | null | null | from inspect import signature
from types import ModuleType
def check_function(scope: ModuleType, func_name: str, params_qty: int = 0):
"""Checks if scope has a function with specific name and params with qty"""
assert hasattr(scope, func_name), (
f'Не найдена функция `{func_name}`. '
'Не удаляйте и не переименовывайте её.'
)
func = getattr(scope, func_name)
assert callable(func), (
f'`{func_name}` должна быть функцией'
)
sig = signature(func)
assert len(sig.parameters) == params_qty, (
f'Функция `{func_name}` должна принимать '
'количество аргументов: {params_qty}'
)
| 28.478261 | 79 | 0.658015 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 374 | 0.490814 |
5b4a8c62ba6b42395b0ef146b30118ae4a735b55 | 539 | py | Python | train.py | ex7763/pytorch-HED | 8695c996af48d917851bfe629ca219471d40aa5c | [
"MIT"
] | null | null | null | train.py | ex7763/pytorch-HED | 8695c996af48d917851bfe629ca219471d40aa5c | [
"MIT"
] | null | null | null | train.py | ex7763/pytorch-HED | 8695c996af48d917851bfe629ca219471d40aa5c | [
"MIT"
] | null | null | null | import torch
import yaml
import argparse
from dataset.BSD500 import BSD500Dataset
from models.HED import HED
###############
# parse cfg
###############
parser = argparse.ArgumentParser()
parser.add_argument('--cfg', dest='cfg', required=True, help='path to config file')
# NOTE: the previous `parser.parse_known_args()` call was redundant -- its
# result was immediately overwritten by parse_args(), which also rejects
# unknown arguments.
args = parser.parse_args()

cfg_file = args.cfg
print('cfg_file: ', cfg_file)

# safe_load never constructs arbitrary Python objects and works with
# PyYAML >= 6, where yaml.load() without an explicit Loader is an error.
with open('config/'+cfg_file, 'r') as f:
    cfg = yaml.safe_load(f)
print(cfg)

########################################
model = HED(cfg)
5b4aa60fac414c1e92f0c6cad337e78c14c64cbe | 3,482 | py | Python | tests/unit/hypernode_vagrant_runner/commands/test_parse_start_runner_arguments.py | vdloo/hypernode-vagrant-runner | 241b9612769b74b536a43f4e6821003c5f1ea2c3 | [
"MIT"
] | null | null | null | tests/unit/hypernode_vagrant_runner/commands/test_parse_start_runner_arguments.py | vdloo/hypernode-vagrant-runner | 241b9612769b74b536a43f4e6821003c5f1ea2c3 | [
"MIT"
] | null | null | null | tests/unit/hypernode_vagrant_runner/commands/test_parse_start_runner_arguments.py | vdloo/hypernode-vagrant-runner | 241b9612769b74b536a43f4e6821003c5f1ea2c3 | [
"MIT"
] | null | null | null | from mock import ANY, call
from hypernode_vagrant_runner.commands import parse_start_runner_arguments
from hypernode_vagrant_runner.settings import HYPERNODE_VAGRANT_PHP_VERSIONS, \
HYPERNODE_VAGRANT_DEFAULT_USER, HYPERNODE_VAGRANT_DEFAULT_PHP_VERSION, \
HYPERNODE_VAGRANT_USERS
from tests.testcase import TestCase
class TestParseStartRunnerArguments(TestCase):
    """Tests for ``parse_start_runner_arguments``: parser construction and flags."""

    def setUp(self):
        # Patch out the ArgumentParser class and the parse_arguments helper so
        # no real command-line parsing happens during the tests.
        self.argument_parser = self.set_up_patch('hypernode_vagrant_runner.commands.ArgumentParser')
        self.parse_arguments = self.set_up_patch('hypernode_vagrant_runner.commands.parse_arguments')

    def test_parse_start_runner_arguments_instantiates_argument_parser(self):
        """The parser is created with the expected program name."""
        parse_start_runner_arguments()

        self.argument_parser.assert_called_once_with(
            prog='hypernode-vagrant-runner',
            description=ANY
        )

    def test_parse_start_runner_arguments_parses_arguments(self):
        """The constructed parser is handed to parse_arguments."""
        parse_start_runner_arguments()

        self.parse_arguments.assert_called_once_with(
            self.argument_parser.return_value
        )

    def test_parse_start_runner_adds_run_once_flag_with_default_true(self):
        """--run-once / -1 is registered as a boolean flag."""
        parse_start_runner_arguments()

        expected_call = call(
            '--run-once', '-1',
            action='store_true',
            help=ANY
        )
        self.assertIn(
            expected_call,
            self.argument_parser.return_value.add_argument.mock_calls
        )

    def test_parse_start_runner_adds_project_path_flag(self):
        """--project-path is registered."""
        parse_start_runner_arguments()

        expected_call = call(
            '--project-path',
            help=ANY
        )
        self.assertIn(
            expected_call,
            self.argument_parser.return_value.add_argument.mock_calls
        )

    def test_parse_start_runner_adds_command_to_run_flag(self):
        """--command-to-run / -c is registered."""
        parse_start_runner_arguments()

        expected_call = call(
            '--command-to-run', '-c',
            help=ANY
        )
        self.assertIn(
            expected_call,
            self.argument_parser.return_value.add_argument.mock_calls
        )

    def test_parse_start_runner_adds_path_flag_for_a_pre_existing_checkout_directory(self):
        """--pre-existing-vagrant-path / -p is registered."""
        parse_start_runner_arguments()

        expected_call = call(
            '--pre-existing-vagrant-path', '-p',
            help=ANY
        )
        self.assertIn(
            expected_call,
            self.argument_parser.return_value.add_argument.mock_calls
        )

    def test_parse_start_runner_adds_php_flag(self):
        """--php is registered with the supported versions and default."""
        parse_start_runner_arguments()

        expected_call = call(
            '--php',
            help=ANY,
            choices=HYPERNODE_VAGRANT_PHP_VERSIONS,
            default=HYPERNODE_VAGRANT_DEFAULT_PHP_VERSION
        )
        self.assertIn(
            expected_call,
            self.argument_parser.return_value.add_argument.mock_calls
        )

    def test_parse_start_runner_adds_user_flag(self):
        """--user is registered with the supported users and default."""
        parse_start_runner_arguments()

        expected_call = call(
            '--user',
            help=ANY,
            choices=HYPERNODE_VAGRANT_USERS,
            default=HYPERNODE_VAGRANT_DEFAULT_USER
        )
        self.assertIn(
            expected_call,
            self.argument_parser.return_value.add_argument.mock_calls
        )

    def test_parse_start_runner_arguments_returns_parsed_arguments(self):
        """The parsed namespace is returned unchanged."""
        ret = parse_start_runner_arguments()

        self.assertEqual(ret, self.parse_arguments.return_value)
| 31.369369 | 101 | 0.670879 | 3,155 | 0.906088 | 0 | 0 | 0 | 0 | 0 | 0 | 241 | 0.069213 |
5b4d5fdd9c41e3cd542218051f15a30c5892c606 | 161 | py | Python | Configs/UNet_Configs.py | zeeshanalipnhwr/Semantic-Segmentation-Keras | 304af52c60a16e32da9f1e3f57c653f578cc8bf5 | [
"MIT"
] | 3 | 2020-04-13T07:56:03.000Z | 2020-10-13T12:56:00.000Z | Configs/UNet_Configs.py | zeeshanalipnhwr/Semantic-Segmentation-Keras | 304af52c60a16e32da9f1e3f57c653f578cc8bf5 | [
"MIT"
] | null | null | null | Configs/UNet_Configs.py | zeeshanalipnhwr/Semantic-Segmentation-Keras | 304af52c60a16e32da9f1e3f57c653f578cc8bf5 | [
"MIT"
] | 2 | 2020-04-06T02:09:04.000Z | 2020-04-06T09:43:36.000Z | DEPTH = 16 # the number of filters of the first conv layer of the encoder of the UNet
# Training hyperparameters
BATCHSIZE = 16   # samples per gradient update
EPOCHS = 100     # full passes over the training set
OPTIMIZER = "adam"
| 23 | 85 | 0.751553 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 106 | 0.658385 |
5b4efefa7c72bf1ac0d47d8be34d61c51dd67ac7 | 571 | py | Python | geinos/app/core/radius/radius.py | falhenaki/GEINOS | c0bf883c582972b08ab5ee45d2bd0463d6c48087 | [
"MIT"
] | 3 | 2018-03-19T16:51:21.000Z | 2019-01-18T22:52:19.000Z | geinos/app/core/radius/radius.py | falhenaki/GEINOS | c0bf883c582972b08ab5ee45d2bd0463d6c48087 | [
"MIT"
] | 261 | 2018-02-08T16:24:26.000Z | 2018-08-07T03:38:16.000Z | geinos/app/core/radius/radius.py | falhenaki/GEINOS | c0bf883c582972b08ab5ee45d2bd0463d6c48087 | [
"MIT"
] | null | null | null | from sqlalchemy import *
from sqlalchemy import Column, String
from sqlalchemy.ext.declarative import declarative_base
from app.core.sqlalchemy_base.augmented_base import CustomMixin
Base = declarative_base()


class Radius(CustomMixin, Base):
    """SQLAlchemy model describing a RADIUS server endpoint."""

    __tablename__ = "Radius"

    # Host is the primary key, so only one row per RADIUS server host.
    host = Column(String, primary_key=True)
    port = Column(Integer)
    secret = Column(String)  # shared secret used to authenticate to the server

    # ----------------------------------------------------------------------
    def __init__(self, secret, host, port):
        self.secret = secret
        self.host = host
        self.port = port
| 33.588235 | 76 | 0.628722 | 360 | 0.630473 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.140105 |
5b51429e70bf3b023a44e4c609ce439b41983428 | 2,747 | py | Python | FCN.py | xinming365/DL-exercise | 3ef153bc1aa791889cd37a7d3c14bef3937c459a | [
"Apache-2.0"
] | null | null | null | FCN.py | xinming365/DL-exercise | 3ef153bc1aa791889cd37a7d3c14bef3937c459a | [
"Apache-2.0"
] | 1 | 2020-07-31T13:15:06.000Z | 2020-07-31T13:17:34.000Z | FCN.py | xinming365/DL-exercise | 3ef153bc1aa791889cd37a7d3c14bef3937c459a | [
"Apache-2.0"
] | null | null | null | # Author : xinming
# Time : 2020/06/27
# Imports
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.utils.data import DataLoader
import torchvision.datasets as datasets
import torchvision.transforms as transforms
#Create fully connected network
# Base class for all neural network modules. Your models should also subclass this class.
class NN(nn.Module):
    """A minimal two-layer fully connected classifier.

    Maps a flat input vector through a 50-unit hidden layer with ReLU to
    raw class scores (no softmax; intended for ``nn.CrossEntropyLoss``).
    """

    def __init__(self, input_size, num_classes):
        super().__init__()
        self.fc1 = nn.Linear(input_size, 50)
        self.fc2 = nn.Linear(50, num_classes)

    def forward(self, x):
        hidden = F.relu(self.fc1(x))
        return self.fc2(hidden)
# Quick sanity check: one forward pass on random data should print (64, 10).
model = NN(input_size=784, num_classes=10)
x = torch.randn(64, 784)
print(model(x).shape)

# Set device
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Move the model to the same device the batches are sent to; previously the
# model stayed on CPU and the forward pass failed whenever CUDA was available.
model = model.to(device)

# Hyperparameters
batch_size = 64
num_epochs = 1
learning_rate = 0.001

# load data (MNIST, downloaded to dataset/ on first run)
train_dataset = datasets.MNIST(root='dataset/', train=True, transform=transforms.ToTensor(), download=True)
train_loader = DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True)
test_dataset = datasets.MNIST(root='dataset/', train=False, transform=transforms.ToTensor(), download=True)
# Fixed: the test loader previously wrapped train_dataset by mistake, so the
# "test" accuracy was really training accuracy.
test_loader = DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=True)

# Loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=learning_rate)

# Train Network
for epoch in range(num_epochs):
    for batch_idx, (data, targets) in enumerate(train_loader):
        # Get data to cuda if possible
        data = data.to(device=device)
        targets = targets.to(device=device)

        # Flatten 28x28 images to (batch, 784) vectors.
        data = data.reshape(data.shape[0], -1)

        # forward
        scores = model(data)
        loss = criterion(scores, targets)

        # backward
        optimizer.zero_grad()
        loss.backward()

        # gradient descent or adam step
        optimizer.step()
def check_accuracy(loader, model):
    """Print the classification accuracy of ``model`` over all batches in ``loader``.

    Temporarily switches the model to eval mode and restores train mode
    before returning.  Uses the module-level ``device`` for tensors.
    """
    # torchvision MNIST exposes the split via the dataset's `train` flag.
    if loader.dataset.train:
        print("Checking accuracy on training data")
    else:
        print("Checking accuracy on test data")

    num_correct = 0
    num_samples = 0
    model.eval()  # disable dropout / batch-norm updates for evaluation

    with torch.no_grad():  # no gradients needed during evaluation
        for x, y in loader:
            x = x.to(device=device)
            y = y.to(device=device)
            x = x.reshape(x.shape[0], -1)  # flatten images to vectors

            scores = model(x)
            _, predictions = scores.max(1)  # predicted class = argmax over scores
            num_correct += (predictions == y).sum()
            num_samples += predictions.size(0)

        print(f"Got {num_correct} / {num_samples} with accuracy {float(num_correct)/float(num_samples) *100:.2f}")

    model.train()  # restore training mode for any subsequent training
# Report accuracy on both loaders after training.
check_accuracy(train_loader, model)
check_accuracy(test_loader, model)
| 28.319588 | 114 | 0.680743 | 295 | 0.10739 | 0 | 0 | 0 | 0 | 0 | 0 | 539 | 0.196214 |
5b51b63f59cf7aeec2c467a2071c5cf2def5fcc8 | 15,485 | py | Python | skexplain/common/utils.py | monte-flora/scikit-explain | d93ca4c77d1d47e613479ae36cc055ffaafea88c | [
"MIT"
] | null | null | null | skexplain/common/utils.py | monte-flora/scikit-explain | d93ca4c77d1d47e613479ae36cc055ffaafea88c | [
"MIT"
] | 7 | 2022-03-06T01:51:33.000Z | 2022-03-28T13:06:34.000Z | skexplain/common/utils.py | monte-flora/scikit-explain | d93ca4c77d1d47e613479ae36cc055ffaafea88c | [
"MIT"
] | null | null | null | import numpy as np
import xarray as xr
import pandas as pd
from collections import ChainMap
from statsmodels.distributions.empirical_distribution import ECDF
from scipy.stats import t
class MissingFeaturesError(Exception):
    """Raised when ALE results are missing for some requested features.

    E.g., all features are required for computing IAS or MEC.

    Attributes
    ----------
    estimator_name : str
        Name of the estimator whose ALE results are incomplete.
    missing_features : array-like
        The features for which no ALE values were found.
    """
    def __init__(self, estimator_name, missing_features):
        # Keep the pieces available so callers can handle the error
        # programmatically instead of parsing the message text.
        self.estimator_name = estimator_name
        self.missing_features = missing_features
        self.message = f"""ALE for {estimator_name} was not computed for all features.
                        These features were missing: {missing_features}"""
        super().__init__(self.message)
def check_all_features_for_ale(ale, estimator_names, features):
    """Verify the ALE dataset contains results for every requested feature.

    Raises ``MissingFeaturesError`` for the first estimator that lacks an
    ``{feature}__{estimator}__ale`` variable for any feature.
    """
    data_vars = ale.data_vars
    for estimator_name in estimator_names:
        present = np.array(
            [f'{f}__{estimator_name}__ale' in data_vars for f in features]
        )
        if not present.all():
            missing_features = np.array(features)[~present]
            raise MissingFeaturesError(estimator_name, missing_features)
def flatten_nested_list(list_of_lists):
    """Flatten a (possibly mixed) list of lists into one flat list.

    Non-list elements are treated as singleton sub-lists, so input like
    ``[1, [2, 3]]`` flattens to ``[1, 2, 3]``.
    """
    # Normalize first: wrap every bare element so each item is a sub-list.
    if not all(isinstance(item, list) for item in list_of_lists):
        list_of_lists = [
            item if isinstance(item, list) else [item]
            for item in list_of_lists
        ]
    return [element for sub_list in list_of_lists for element in sub_list]
def is_dataset(data):
    """Return True if ``data`` is an ``xarray.Dataset``."""
    return isinstance(data, xr.Dataset)
def is_dataframe(data):
    """Return True if ``data`` is a ``pandas.DataFrame``."""
    return isinstance(data, pd.DataFrame)
def check_is_permuted(X, X_permuted):
    """Return names of columns whose values differ between X and X_permuted."""
    return [
        feature
        for feature in X.columns
        if not np.array_equal(X.loc[:, feature], X_permuted.loc[:, feature])
    ]
def is_correlated(corr_matrix, feature_pairs, rho_threshold=0.8):
    """Flag which feature pairs are linearly correlated above a threshold.

    Keys of the returned dict are the ``"f1__f2"`` pair strings; values are
    booleans indicating whether ``round(corr, 3) >= rho_threshold``.
    """
    flags = {}
    for pair in feature_pairs:
        first, second = pair.split("__")
        flags[pair] = round(corr_matrix[first][second], 3) >= rho_threshold
    return flags
def is_fitted(estimator):
"""
Checks if a scikit-learn estimator/transformer has already been fit.
Parameters
----------
estimator: scikit-learn estimator (e.g. RandomForestClassifier)
or transformer (e.g. MinMaxScaler) object
Returns
-------
Boolean that indicates if ``estimator`` has already been fit (True) or not (False).
"""
attrs = [v for v in vars(estimator) if v.endswith("_") and not v.startswith("__")]
return len(attrs) != 0
def determine_feature_dtype(X, features):
    """Split ``features`` into (non-categorical, categorical) lists.

    A feature is categorical when its pandas dtype string is ``"category"``.
    Raises ``KeyError`` for a feature not present in ``X``.
    """
    valid_names = list(X.columns)
    non_cat_features = []
    cat_features = []
    for feature in features:
        if feature not in valid_names:
            raise KeyError(f"'{feature}' is not a valid feature.")
        bucket = (
            cat_features
            if str(X.dtypes[feature]) == "category"
            else non_cat_features
        )
        bucket.append(feature)
    return non_cat_features, cat_features
def cartesian(array, out=None):
    """Generate a cartesian product of input array.

    Adapted from sklearn/utils/extmath.py.

    Parameters
    ----------
    array : list of array-like
        1-D array to form the cartesian product of.
    out : ndarray
        Array to place the cartesian product in.

    Returns
    -------
    out : ndarray
        2-D array of shape (M, len(array)) containing cartesian products
        formed of input array.

    X
    --------
    >>> cartesian(([1, 2, 3], [4, 5], [6, 7]))
    array([[1, 4, 6],
           [1, 4, 7],
           [1, 5, 6],
           [1, 5, 7],
           [2, 4, 6],
           [2, 4, 7],
           [2, 5, 6],
           [2, 5, 7],
           [3, 4, 6],
           [3, 4, 7],
           [3, 5, 6],
           [3, 5, 7]])
    """
    arrays = [np.asarray(a) for a in array]
    # Output dtype follows the first input array.
    dtype = arrays[0].dtype

    # Index grid: one row per combination, one column per input array.
    index_grid = np.indices([len(a) for a in arrays])
    index_grid = index_grid.reshape(len(arrays), -1).T

    if out is None:
        out = np.empty_like(index_grid, dtype=dtype)
    for col, values in enumerate(arrays):
        out[:, col] = values[index_grid[:, col]]
    return out
def to_dataframe(results, estimator_names, feature_names):
    """
    Convert the feature contribution results to a pandas.DataFrame
    with nested indexing.

    Parameters
    ----------
    results : sequence of two dicts
        results[0]: avg. contributions per estimator -> nested key -> feature;
        results[1]: avg. feature values per estimator -> nested key -> feature.
        assumes every estimator shares the same nested keys -- TODO confirm.
    estimator_names : list of str
        Estimator names; becomes the inner DataFrame index.
    feature_names : list of str
        Feature names; "Bias" is appended for the contributions columns only.

    Returns
    -------
    pandas.DataFrame indexed by (nested key, estimator name) with one
    "<feature>_contrib" column per feature (plus Bias) and one
    "<feature>_val" column per feature.
    """
    # results[0] = dict of avg. contributions per estimator
    # results[1] = dict of avg. feature values per estimator
    contrib_names = feature_names.copy()
    contrib_names += ["Bias"]

    # Nested keys are taken from the first estimator's results.
    nested_key = results[0][estimator_names[0]].keys()

    dframes = []
    for key in nested_key:
        data = []
        for name in estimator_names:
            contribs_dict = results[0][name][key]
            vals_dict = results[1][name][key]
            # Row layout: all contributions first (incl. Bias), then values.
            data.append(
                [contribs_dict[f] for f in contrib_names]
                + [vals_dict[f] for f in feature_names]
            )
        column_names = [f + "_contrib" for f in contrib_names] + [
            f + "_val" for f in feature_names
        ]

        df = pd.DataFrame(data, columns=column_names, index=estimator_names)
        dframes.append(df)

    # Outer index level is the nested key; inner level the estimator name.
    result = pd.concat(dframes, keys=list(nested_key))

    return result
def to_xarray(data):
    """Converts data dict to xarray.Dataset"""
    ds = xr.Dataset(data)
    return ds
def is_str(a):
    """Check if argument is a string"""
    return isinstance(a, str)
def is_list(a):
    """Check if argument is a list"""
    return isinstance(a, list)
def to_list(a):
    """Wrap argument in a single-element list"""
    return [a]
def is_tuple(a):
    """Check if argument is a tuple"""
    return isinstance(a, tuple)
def is_valid_feature(features, official_feature_list):
    """Raise if any feature (or member of a feature-pair tuple) is unknown."""
    for f in features:
        # A tuple represents a feature pair; treat a lone feature as a
        # singleton so both cases share one loop.
        candidates = f if isinstance(f, tuple) else (f,)
        for sub_f in candidates:
            if sub_f not in official_feature_list:
                raise Exception(f"Feature {sub_f} is not a valid feature!")
def is_classifier(estimator):
    """Return True if the given estimator is (probably) a classifier.

    Follows the sklearn convention of inspecting the ``_estimator_type``
    attribute (function adapted from sklearn's base.py).

    Parameters
    ----------
    estimator : object
        Estimator object to test.

    Returns
    -------
    out : bool
        True if estimator is a classifier and False otherwise.
    """
    estimator_type = getattr(estimator, "_estimator_type", None)
    return estimator_type == "classifier"
def is_regressor(estimator):
    """Return True if the given estimator is (probably) a regressor.

    Follows the sklearn convention of inspecting the ``_estimator_type``
    attribute (function adapted from sklearn's base.py).

    Parameters
    ----------
    estimator : object
        Estimator object to test.

    Returns
    -------
    out : bool
        True if estimator is a regressor and False otherwise.
    """
    estimator_type = getattr(estimator, "_estimator_type", None)
    return estimator_type == "regressor"
def is_all_dict(alist):
    """Return True when every element of ``alist`` is a dict."""
    return all(isinstance(element, dict) for element in alist)
def compute_bootstrap_indices(X, subsample=1.0, n_bootstrap=1, seed=90):
"""
Routine to generate the indices for bootstrapped X.
Args:
----------------
X : pandas.DataFrame, numpy.array
subsample : float or integer
n_bootstrap : integer
Return:
----------------
bootstrap_indices : list
list of indices of the size of subsample or subsample*len(X)
"""
base_random_state = np.random.RandomState(seed=seed)
random_num_set = base_random_state.choice(10000, size=n_bootstrap, replace=False)
random_states = [np.random.RandomState(s) for s in random_num_set]
n_samples = len(X)
size = int(n_samples * subsample) if subsample <= 1.0 else subsample
bootstrap_indices = [
random_state.choice(range(n_samples), size=size).tolist()
for random_state in random_states
]
return bootstrap_indices
def merge_dict(dicts):
    """Merge a list of dicts into one; earlier dicts win on duplicate keys."""
    merged = {}
    # ChainMap semantics: the first dict containing a key provides its value,
    # so apply the dicts in reverse order and let earlier ones overwrite.
    for d in reversed(dicts):
        merged.update(d)
    return merged
def merge_nested_dict(dicts):
    """
    Merge a list of two-level nested dicts into a single nested dict.

    Later dicts overwrite earlier ones on matching (key, subkey) pairs.
    """
    merged = {}
    for d in dicts:
        for outer_key, inner in d.items():
            for inner_key, value in inner.items():
                merged.setdefault(outer_key, {})[inner_key] = value
    return merged
def is_outlier(points, thresh=3.5):
    """
    Flag observations whose modified z-score exceeds ``thresh``.

    Parameters:
    -----------
        points : An numobservations by numdimensions array of observations
        thresh : The modified z-score to use as a threshold. Observations with
            a modified z-score (based on the median absolute deviation) greater
            than this value will be classified as outliers.

    Returns:
    --------
        mask : A numobservations-length boolean array (True = outlier).

    References:
    ----------
        Boris Iglewicz and David Hoaglin (1993), "Volume 16: How to Detect and
        Handle Outliers", The ASQC Basic References in Quality Control:
        Statistical Techniques, Edward F. Mykytka, Ph.D., Editor.
    """
    # Promote a 1-D sample to a column vector so the math below is uniform.
    if len(points.shape) == 1:
        points = points[:, None]

    center = np.median(points, axis=0)
    deviation = np.sqrt(np.sum((points - center) ** 2, axis=-1))
    med_abs_deviation = np.median(deviation)

    modified_z_score = 0.6745 * deviation / med_abs_deviation
    return modified_z_score > thresh
def cmds(D, k=2):
    """Classical multidimensional scaling.

    Theory and code references:
    https://en.wikipedia.org/wiki/Multidimensional_scaling#Classical_multidimensional_scaling
    http://www.nervouscomputer.com/hfs/cmdscale-in-python/

    Arguments:
    D -- A squared matrix-like object (array, DataFrame, ....), usually a distance matrix
    k -- Number of output dimensions; must be <= D.shape[0] - 1.

    Returns an (n, k) array of coordinates whose pairwise distances
    approximate D.

    Raises ValueError (a subclass of the previously raised Exception, so
    existing callers still catch it) for a non-square D or too-large k.
    """
    n = D.shape[0]
    if D.shape[0] != D.shape[1]:
        raise ValueError("The matrix D should be squared")
    if k > (n - 1):
        raise ValueError("k should be an integer <= D.shape[0] - 1")
    # (1) Set up the squared proximity matrix
    D_double = np.square(D)
    # (2) Apply double centering using the centering matrix J = I - 11'/n
    center_mat = np.eye(n) - np.ones((n, n)) / n
    B = -(1 / 2) * center_mat.dot(D_double).dot(center_mat)
    # (3) Determine the m largest eigenvalues
    # (where m is the number of dimensions desired for the output).
    # B is symmetric, so eigh applies.
    eigenvals, eigenvecs = np.linalg.eigh(B)
    # sort descending
    idx = np.argsort(eigenvals)[::-1]
    eigenvals = eigenvals[idx]
    eigenvecs = eigenvecs[:, idx]
    # (4) X = eigenvecs.dot(diag(sqrt(eigenvals))). Clamp eigenvalues that
    # are negative (round-off error, or a non-Euclidean input matrix) to
    # zero so the square root stays real instead of producing NaNs.
    eigen_sqrt_diag = np.diag(np.sqrt(np.maximum(eigenvals[0:k], 0)))
    ret = eigenvecs[:, 0:k].dot(eigen_sqrt_diag)
    return ret
def order_groups(X, feature):
    """Assign an order to the values of a categorical feature.

    The function returns an order to the unique values in X[feature] according to
    their similarity based on the other features.
    The distance between two categories is the sum over the distances of each feature.

    Arguments:
    X -- A pandas DataFrame containing all the features to consider in the ordering
        (including the categorical feature to be ordered).
    feature -- String, the name of the column holding the categorical feature to be ordered.

    Returns a pandas Series mapping each category (as index) to its rank.
    """
    features = X.columns
    # groups = X[feature].cat.categories.values
    groups = X[feature].unique()
    D_cumu = pd.DataFrame(0, index=groups, columns=groups)
    K = len(groups)
    for j in set(features) - set([feature]):
        D = pd.DataFrame(index=groups, columns=groups)
        # discrete/factor feature j (e.g. j = 'color'): distance is half the
        # L1 distance between the per-group value distributions.
        if (X[j].dtypes.name == "category") | (
            (len(X[j].unique()) <= 10) & ("float" not in X[j].dtypes.name)
        ):
            # counts and proportions of each value in j in each group in 'feature'
            cross_counts = pd.crosstab(X[feature], X[j])
            cross_props = cross_counts.div(np.sum(cross_counts, axis=1), axis=0)
            for i in range(K):
                group = groups[i]
                D_values = abs(cross_props - cross_props.loc[group]).sum(axis=1) / 2
                # D is symmetric, so fill the row and column together.
                D.loc[group, :] = D_values
                D.loc[:, group] = D_values
        else:
            # continuous feature j (e.g. j = 'length'): distance is the
            # maximum gap between the per-group ECDFs, evaluated on the
            # 1/100 quantiles of the feature.
            seq = np.arange(0, 1, 1 / 100)
            q_X_j = X[j].quantile(seq).to_list()
            # get the ecdf (empirical cumulative distribution function)
            # computed from the data points in each group
            X_ecdf = X.groupby(feature)[j].agg(ECDF)
            # apply each of the functions on the quantiles,
            # i.e. for each quantile value get the probability that j will
            # take a value less than or equal to this value.
            q_ecdf = X_ecdf.apply(lambda x: x(q_X_j))
            for i in range(K):
                group = groups[i]
                D_values = q_ecdf.apply(lambda x: max(abs(x - q_ecdf[group])))
                D.loc[group, :] = D_values
                D.loc[:, group] = D_values
        D_cumu = D_cumu + D
    # reduce the dimension of the cumulative distance matrix to 1
    D1D = cmds(D_cumu, 1).flatten()
    # order groups based on the values (reuse the argsort instead of
    # recomputing it, as the original did)
    order_idx = D1D.argsort()
    groups_ordered = D_cumu.index[order_idx]
    return pd.Series(range(K), index=groups_ordered)
def quantile_ied(x_vec, q):
    """
    Inverse of empirical distribution function (quantile R type 1).
    More details in
    https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.mstats.mquantiles.html
    https://stat.ethz.ch/R-manual/R-devel/library/stats/html/quantile.html
    https://en.wikipedia.org/wiki/Quantile
    Arguments:
    x_vec -- A pandas series containing the values to compute the quantile for
    q -- An array of probabilities (values between 0 and 1)
    """
    # Work on the order statistics of the sample.
    x_vec = x_vec.sort_values()
    # NOTE(review): n is len - 1 rather than len here; presumably deliberate
    # for this type-1 variant — confirm against the linked references.
    n = len(x_vec) - 1
    m = 0
    j = (n * q + m).astype(int)  # location of the value
    # Fractional remainder; zero iff n*q lands exactly on an index.
    g = n * q + m - j
    gamma = (g != 0).astype(int)
    # Vectorised selection: when the remainder is zero pick the j-th order
    # statistic via the shifted series (shift(1).iloc[j] is x_vec.iloc[j-1]),
    # otherwise pick x_vec.iloc[j]. The fill_value=0 placeholder can only
    # surface for q == 0, and that entry is overwritten with the true
    # minimum below.
    quant_res = (1 - gamma) * x_vec.shift(1, fill_value=0).iloc[j] + gamma * x_vec.iloc[
        j
    ]
    # Re-index the result by the requested probabilities.
    quant_res.index = q
    # add min at quantile zero and max at quantile one (if needed)
    if 0 in q:
        quant_res.loc[0] = x_vec.min()
    if 1 in q:
        quant_res.loc[1] = x_vec.max()
    return quant_res
def CI_estimate(x_vec, C=0.95):
    """Estimate the half-width of the confidence interval of a data sample.

    The confidence interval of the given data sample (x_vec) is
    [mean(x_vec) - returned value, mean(x_vec) + returned value].

    Arguments:
    x_vec -- Sample values (pandas Series).
    C -- Confidence level, e.g. 0.95 for a 95% interval.
    """
    n = len(x_vec)
    # Two-sided interval: half of alpha = 1 - C goes in each tail of the
    # Student-t distribution with n - 1 degrees of freedom.
    upper_tail_prob = 1 - (1 - C) / 2
    std_error = x_vec.std() / np.sqrt(n)
    return std_error * t.ppf(upper_tail_prob, n - 1)
| 31.156942 | 97 | 0.619309 | 412 | 0.026606 | 0 | 0 | 0 | 0 | 0 | 0 | 6,915 | 0.446561 |
5b539203b00b4fad7af2f3522746cdd2fcb16ed4 | 5,941 | py | Python | recipes/recipe_modules/powershell/resources/psinvoke.py | xswz8015/infra | f956b78ce4c39cc76acdda47601b86794ae0c1ba | [
"BSD-3-Clause"
] | null | null | null | recipes/recipe_modules/powershell/resources/psinvoke.py | xswz8015/infra | f956b78ce4c39cc76acdda47601b86794ae0c1ba | [
"BSD-3-Clause"
] | 7 | 2022-02-15T01:11:37.000Z | 2022-03-02T12:46:13.000Z | recipes/recipe_modules/powershell/resources/psinvoke.py | NDevTK/chromium-infra | d38e088e158d81f7f2065a38aa1ea1894f735ec4 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import subprocess
import argparse
import sys
import os
import re
import json
import codecs
import platform
# Used to run commands through powershell on windows and collect the responses
# and log files that may be generated by the command run. This is meant for use
# in situations where the commands either don't return the results to STDOUT,
# instead writing to a log file, or if the commands do return results but do not
# write the logs to STDOUT. This can also be used to run regular windows
# executables as well, powershell will happily execute them for you.
# ALLOWLIST filters the files that can be read to STDOUT.
ALLOWLIST = [
    re.compile(y) for y in [
        # Log files. Raw string fixes the invalid '\.' escape, which is a
        # SyntaxWarning (and eventually an error) on modern Python.
        r'^.*\.log$',
    ]
]
# Maps a BOM (byte-order mark) prefix to the codec name used to decode a
# log file that begins with it.
# TODO(anushruth): Update list with all other possibilities.
codec_map = {
    codecs.BOM_UTF16_LE: 'utf-16-le',
    codecs.BOM_UTF32: 'utf-32',
    codecs.BOM_UTF8: 'utf-8-sig'
}
def main(argv):
  """Run a powershell command and emit every collected log to stdout as JSON."""
  # Powershell only ships on Windows (at least until MS ports it elsewhere),
  # so short-circuit other platforms with a canned failure payload.
  if platform.system() != 'Windows':
    print(json.dumps(gen_result('Not run on Windows')))
    return
  parser = argparse.ArgumentParser(
      description="""Runs a powershell command,
                   waits for its completion and returns all
                   the output logs to stdout as a json""",
      epilog="""Meant to be used by the powershell
             recipe module to run a command that
             generates multiple logs and stream the logs
             back to the recipe""")
  # The actual powershell command/script to execute.
  parser.add_argument(
      '--command', required=True, help='powershell command to execute')
  # Log files or directories the command is expected to write into.
  parser.add_argument(
      '--logs',
      required=False,
      nargs='*',
      help='log file or dir to watch and return stuff from')
  # Exit codes the caller wants treated as success.
  parser.add_argument(
      '--ret_codes',
      required=False,
      type=int,
      default=[0],
      nargs='*',
      help='return codes to treat as success')
  parser.add_argument(
      'args', nargs='*', help='optionals args to the powershell command')
  opts = parser.parse_args(argv[1:])
  # Execute the command and stream the results plus logs as one JSON blob.
  collected = exec_ps(
      opts.command, opts.logs, args=opts.args, ret_codes=opts.ret_codes)
  print(json.dumps(collected))
def ensure_logs(logs):
  """Create any missing log directories up front.

  Entries matching the ALLOWLIST are treated as log files rather than
  directories and are left for the command under test to create.
  """
  # A None/empty logs argument means there is nothing to prepare.
  for path in logs or []:
    # Anything absent that is not an allowlisted file is assumed to be a
    # directory the command will write logs into.
    if not (os.path.exists(path) or is_allowed(path)):
      os.makedirs(path)
def exec_ps(command, logs, args, ret_codes):
  """ Executes a power shell command and waits for it to complete.

  Collects the command results and any generated log files.
  command: path to script/executable/batchfile, powershell command
  logs: list of log files and directories.
  args: optional args to the command
  ret_codes: optional success return codes
  Returns dict containing keys 'results' and every '<log-file>' in logs."""
  ensure_logs(logs)
  # powershell command
  psc = ['powershell', '-Command', command] + args
  # l contains all the logs + return values
  l = {}
  # Attempt to execute the command. universal_newlines=True makes
  # check_output return str rather than bytes, keeping the collected
  # output JSON-serializable by the caller on Python 3.
  try:
    output = subprocess.check_output(
        psc, stderr=subprocess.STDOUT, universal_newlines=True)
    try:
      # Check if the output is a json file
      jout = json.loads(output)
    except Exception as j:
      # It's not known if the script completed successfully
      l = gen_result(
          'No json object returned. Check stdout_stderr for ' +
          'results. {}'.format(j), True)
      # not a json return script
      l['stdout_stderr'] = output
    else:
      # It's a json file
      l['results'] = jout
  except subprocess.CalledProcessError as e:
    # Non zero return by the script/cmd run. Still a success if the code
    # is one of the caller-approved return codes.
    l = gen_result(e.output, e.returncode in ret_codes)
    l['results']['Command'] = e.cmd
    l['results']['ReturnCode'] = e.returncode
    l['stdout_stderr'] = e.output
  except Exception as e:
    # Failed to run the command for some reason
    l = gen_result(str(e))
    l['results']['Command'] = ' '.join(psc)
  finally:
    # Read all the logs into the result dict
    if logs:
      for log in logs:
        if os.path.isdir(log):
          for k, v in read_logs([os.path.join(log, x) for x in os.listdir(log)
                                ]).items():
            l[k] = v
        else:
          # BUG FIX: read_logs takes a single list argument. The old call
          # read_logs(log, l) raised TypeError (and a bare string would
          # have been iterated per-character). update() also avoids a
          # KeyError for a path filtered out by the allowlist.
          l.update(read_logs([log]))
  return l
def read_logs(logs):
  """ Reads all the given allowlisted log files to RAM.

  logs: list of log file paths (directories are skipped).
  Returns dict mapping each readable log file to its decoded contents.

  Some logs may be encoded differently. ADKSetup is known to generate
  logs with different encodings during the same run, so sniff the BOM of
  each file and decode accordingly. (The original implementation compared
  a Python 3 str against bytes BOMs — a dead branch — and would have
  crashed on str.decode had it ever matched; it also never closed the
  file handle.)
  """
  l = {}
  for log in logs:
    if os.path.isdir(log) or not is_allowed(log):
      continue
    # Read raw bytes so the BOM is still visible for encoding detection.
    with open(log, 'rb') as f:
      raw = f.read()
    for bom, encoding in codec_map.items():
      if raw.startswith(bom):
        # See codec_map for the BOM -> codec mapping.
        l[log] = raw.decode(encoding)
        break
    else:
      # No BOM: assume UTF-8, replacing undecodable bytes rather than
      # failing the whole log-collection step.
      l[log] = raw.decode('utf-8', errors='replace')
  return l
def is_allowed(l):
  """ Implements ALLOWLIST.

  l: log file path to check.
  Returns True if l matches any pattern in ALLOWLIST, False otherwise. """
  return any(pattern.match(l) is not None for pattern in ALLOWLIST)
def gen_result(err, success=False):
  """Build the canonical result payload for a command invocation.

  err: human-readable error message.
  success: whether the command is considered to have succeeded.
  """
  error_info = {'Message': err}
  return {'results': {'Success': success, 'ErrorInfo': error_info}}
if __name__ == '__main__':
main(sys.argv)
| 30.782383 | 80 | 0.650059 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,307 | 0.55664 |