Dataset columns (one row per source file): hexsha, size, ext, lang; max_stars_repo_path, max_stars_repo_name, max_stars_repo_head_hexsha, max_stars_repo_licenses, max_stars_count, max_stars_repo_stars_event_min_datetime, max_stars_repo_stars_event_max_datetime; the corresponding max_issues_* and max_forks_* columns; content, avg_line_length, max_line_length, alphanum_fraction.
hexsha: 1f58099aa1fd3866ae1ea7e7e33449de571610fe | size: 45,464 | ext: py | lang: Python
repo_path: manila/db/api.py | repo_name: vponomaryov/manila | repo_head_hexsha: ffe135a5b35a0964179f0dc148d569037f26a929 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null (all star/issue/fork event datetimes: null; the repo path, name, head hexsha, and licenses are identical across the stars, issues, and forks columns)
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Defines interface for DB access.
The underlying driver is loaded as a :class:`LazyPluggable`.
Functions in this module are imported into the manila.db namespace. Call these
functions from manila.db namespace, not the manila.db.api namespace.
All functions in this module return objects that implement a dictionary-like
interface. Currently, many of these objects are sqlalchemy objects that
implement a dictionary interface. However, a future goal is to have all of
these objects be simple dictionaries.
**Related Flags**
:backend: string to lookup in the list of LazyPluggable backends.
`sqlalchemy` is the only supported backend right now.
:connection: string specifying the sqlalchemy connection to use, like:
`sqlite:///var/lib/manila/manila.sqlite`.
:enable_new_services: when adding a new service to the database, is it in the
pool of available hardware (Default: True)
"""
from oslo_config import cfg
from oslo_db import api as db_api
db_opts = [
cfg.StrOpt('db_backend',
default='sqlalchemy',
help='The backend to use for database.'),
cfg.BoolOpt('enable_new_services',
default=True,
help='Services to be added to the available pool on create.'),
cfg.StrOpt('share_name_template',
default='share-%s',
help='Template string to be used to generate share names.'),
cfg.StrOpt('share_snapshot_name_template',
default='share-snapshot-%s',
help='Template string to be used to generate share snapshot '
'names.'),
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
_BACKEND_MAPPING = {'sqlalchemy': 'manila.db.sqlalchemy.api'}
IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING,
lazy=True)
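# Illustration only (a hedged sketch, not part of the upstream module): every
# wrapper below simply forwards to IMPL, so a call made through the manila.db
# namespace ends up in manila.db.sqlalchemy.api.  For example, assuming a
# valid request context and share id:
#
#     from manila import db
#     share = db.share_get(context, share_id)   # dispatches to IMPL.share_get
#
# Because lazy=True is passed above, the backend module is not imported until
# the first such call is made.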
def authorize_project_context(context, project_id):
"""Ensures a request has permission to access the given project."""
return IMPL.authorize_project_context(context, project_id)
def authorize_quota_class_context(context, class_name):
"""Ensures a request has permission to access the given quota class."""
return IMPL.authorize_quota_class_context(context, class_name)
###################
def service_destroy(context, service_id):
"""Destroy the service or raise if it does not exist."""
return IMPL.service_destroy(context, service_id)
def service_get(context, service_id):
"""Get a service or raise if it does not exist."""
return IMPL.service_get(context, service_id)
def service_get_by_host_and_topic(context, host, topic):
"""Get a service by host it's on and topic it listens to."""
return IMPL.service_get_by_host_and_topic(context, host, topic)
def service_get_all(context, disabled=None):
"""Get all services."""
return IMPL.service_get_all(context, disabled)
def service_get_all_by_topic(context, topic):
"""Get all services for a given topic."""
return IMPL.service_get_all_by_topic(context, topic)
def service_get_all_share_sorted(context):
"""Get all share services sorted by share count.
:returns: a list of (Service, share_count) tuples.
"""
return IMPL.service_get_all_share_sorted(context)
def service_get_by_args(context, host, binary):
"""Get the state of an service by node name and binary."""
return IMPL.service_get_by_args(context, host, binary)
def service_create(context, values):
"""Create a service from the values dictionary."""
return IMPL.service_create(context, values)
def service_update(context, service_id, values):
"""Set the given properties on an service and update it.
Raises NotFound if service does not exist.
"""
return IMPL.service_update(context, service_id, values)
####################
def quota_create(context, project_id, resource, limit, user_id=None):
"""Create a quota for the given project and resource."""
return IMPL.quota_create(context, project_id, resource, limit,
user_id=user_id)
def quota_get(context, project_id, resource, user_id=None):
"""Retrieve a quota or raise if it does not exist."""
return IMPL.quota_get(context, project_id, resource, user_id=user_id)
def quota_get_all_by_project_and_user(context, project_id, user_id):
"""Retrieve all quotas associated with a given project and user."""
return IMPL.quota_get_all_by_project_and_user(context, project_id, user_id)
def quota_get_all_by_project(context, project_id):
"""Retrieve all quotas associated with a given project."""
return IMPL.quota_get_all_by_project(context, project_id)
def quota_get_all(context, project_id):
"""Retrieve all user quotas associated with a given project."""
return IMPL.quota_get_all(context, project_id)
def quota_update(context, project_id, resource, limit, user_id=None):
"""Update a quota or raise if it does not exist."""
return IMPL.quota_update(context, project_id, resource, limit,
user_id=user_id)
###################
def quota_class_create(context, class_name, resource, limit):
"""Create a quota class for the given name and resource."""
return IMPL.quota_class_create(context, class_name, resource, limit)
def quota_class_get(context, class_name, resource):
"""Retrieve a quota class or raise if it does not exist."""
return IMPL.quota_class_get(context, class_name, resource)
def quota_class_get_default(context):
"""Retrieve all default quotas."""
return IMPL.quota_class_get_default(context)
def quota_class_get_all_by_name(context, class_name):
"""Retrieve all quotas associated with a given quota class."""
return IMPL.quota_class_get_all_by_name(context, class_name)
def quota_class_update(context, class_name, resource, limit):
"""Update a quota class or raise if it does not exist."""
return IMPL.quota_class_update(context, class_name, resource, limit)
###################
def quota_usage_get(context, project_id, resource, user_id=None):
"""Retrieve a quota usage or raise if it does not exist."""
return IMPL.quota_usage_get(context, project_id, resource, user_id=user_id)
def quota_usage_get_all_by_project_and_user(context, project_id, user_id):
"""Retrieve all usage associated with a given resource."""
return IMPL.quota_usage_get_all_by_project_and_user(context,
project_id, user_id)
def quota_usage_get_all_by_project(context, project_id):
"""Retrieve all usage associated with a given resource."""
return IMPL.quota_usage_get_all_by_project(context, project_id)
def quota_usage_create(context, project_id, user_id, resource, in_use,
reserved=0, until_refresh=None):
"""Create a quota usage."""
return IMPL.quota_usage_create(context, project_id, user_id, resource,
in_use, reserved, until_refresh)
def quota_usage_update(context, project_id, user_id, resource, **kwargs):
"""Update a quota usage or raise if it does not exist."""
return IMPL.quota_usage_update(context, project_id, user_id, resource,
**kwargs)
###################
def quota_reserve(context, resources, quotas, user_quotas, deltas, expire,
until_refresh, max_age, project_id=None, user_id=None):
"""Check quotas and create appropriate reservations."""
return IMPL.quota_reserve(context, resources, quotas, user_quotas, deltas,
expire, until_refresh, max_age,
project_id=project_id, user_id=user_id)
def reservation_commit(context, reservations, project_id=None, user_id=None):
"""Commit quota reservations."""
return IMPL.reservation_commit(context, reservations,
project_id=project_id,
user_id=user_id)
def reservation_rollback(context, reservations, project_id=None, user_id=None):
"""Roll back quota reservations."""
return IMPL.reservation_rollback(context, reservations,
project_id=project_id,
user_id=user_id)
def quota_destroy_all_by_project_and_user(context, project_id, user_id):
"""Destroy all quotas associated with a given project and user."""
return IMPL.quota_destroy_all_by_project_and_user(context,
project_id, user_id)
def quota_destroy_all_by_project(context, project_id):
"""Destroy all quotas associated with a given project."""
return IMPL.quota_destroy_all_by_project(context, project_id)
def reservation_expire(context):
"""Roll back any expired reservations."""
return IMPL.reservation_expire(context)
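# Illustration only (not part of the upstream module): quota_reserve,
# reservation_commit and reservation_rollback above are normally used as one
# transaction-like sequence.  A hedged sketch, where resources, quotas,
# user_quotas and expire are placeholders supplied by the quota engine:
#
#     reservations = quota_reserve(ctxt, resources, quotas, user_quotas,
#                                  {'shares': 1}, expire,
#                                  until_refresh=None, max_age=0)
#     try:
#         ...  # create the share
#         reservation_commit(ctxt, reservations)
#     except Exception:
#         reservation_rollback(ctxt, reservations)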
###################
def share_instance_get(context, instance_id, with_share_data=False):
"""Get share instance by id."""
return IMPL.share_instance_get(context, instance_id,
with_share_data=with_share_data)
def share_instance_create(context, share_id, values):
"""Create new share instance."""
return IMPL.share_instance_create(context, share_id, values)
def share_instance_delete(context, instance_id):
"""Delete share instance."""
return IMPL.share_instance_delete(context, instance_id)
def share_instance_update(context, instance_id, values, with_share_data=False):
"""Update share instance fields."""
return IMPL.share_instance_update(context, instance_id, values,
with_share_data=with_share_data)
def share_instances_get_all(context):
"""Returns all share instances."""
return IMPL.share_instances_get_all(context)
def share_instances_get_all_by_share_server(context, share_server_id):
"""Returns all share instances with given share_server_id."""
return IMPL.share_instances_get_all_by_share_server(context,
share_server_id)
def share_instances_get_all_by_host(context, host, with_share_data=False):
"""Returns all share instances with given host."""
return IMPL.share_instances_get_all_by_host(
context, host, with_share_data=with_share_data)
def share_instances_get_all_by_share_network(context, share_network_id):
"""Returns list of shares that belong to given share network."""
return IMPL.share_instances_get_all_by_share_network(context,
share_network_id)
def share_instances_get_all_by_share(context, share_id):
"""Returns list of shares that belong to given share."""
return IMPL.share_instances_get_all_by_share(context, share_id)
def share_instances_get_all_by_share_group_id(context, share_group_id):
"""Returns list of share instances that belong to given share group."""
return IMPL.share_instances_get_all_by_share_group_id(
context, share_group_id)
###################
def share_create(context, share_values, create_share_instance=True):
"""Create new share."""
return IMPL.share_create(context, share_values,
create_share_instance=create_share_instance)
def share_update(context, share_id, values):
"""Update share fields."""
return IMPL.share_update(context, share_id, values)
def share_get(context, share_id):
"""Get share by id."""
return IMPL.share_get(context, share_id)
def share_get_all(context, filters=None, sort_key=None, sort_dir=None):
"""Get all shares."""
return IMPL.share_get_all(
context, filters=filters, sort_key=sort_key, sort_dir=sort_dir,
)
def share_get_all_by_project(context, project_id, filters=None,
is_public=False, sort_key=None, sort_dir=None):
"""Returns all shares with given project ID."""
return IMPL.share_get_all_by_project(
context, project_id, filters=filters, is_public=is_public,
sort_key=sort_key, sort_dir=sort_dir,
)
def share_get_all_by_share_group_id(context, share_group_id,
filters=None, sort_key=None,
sort_dir=None):
"""Returns all shares with given project ID and share group id."""
return IMPL.share_get_all_by_share_group_id(
context, share_group_id, filters=filters,
sort_key=sort_key, sort_dir=sort_dir)
def share_get_all_by_share_server(context, share_server_id, filters=None,
sort_key=None, sort_dir=None):
"""Returns all shares with given share server ID."""
return IMPL.share_get_all_by_share_server(
context, share_server_id, filters=filters, sort_key=sort_key,
sort_dir=sort_dir,
)
def share_delete(context, share_id):
"""Delete share."""
return IMPL.share_delete(context, share_id)
###################
def share_access_create(context, values):
"""Allow access to share."""
return IMPL.share_access_create(context, values)
def share_access_get(context, access_id):
"""Get share access rule."""
return IMPL.share_access_get(context, access_id)
def share_access_get_all_for_share(context, share_id):
"""Get all access rules for given share."""
return IMPL.share_access_get_all_for_share(context, share_id)
def share_access_get_all_for_instance(context, instance_id, filters=None,
with_share_access_data=True):
"""Get all access rules related to a certain share instance."""
return IMPL.share_access_get_all_for_instance(
context, instance_id, filters=filters,
with_share_access_data=with_share_access_data)
def share_access_get_all_by_type_and_access(context, share_id, access_type,
access):
"""Returns share access by given type and access."""
return IMPL.share_access_get_all_by_type_and_access(
context, share_id, access_type, access)
def share_instance_access_create(context, values, share_instance_id):
"""Allow access to share instance."""
return IMPL.share_instance_access_create(
context, values, share_instance_id)
def share_instance_access_copy(context, share_id, instance_id):
"""Maps the existing access rules for the share to the instance in the DB.
Adds the instance mapping to the share's access rules and
returns the share's access rules.
"""
return IMPL.share_instance_access_copy(context, share_id, instance_id)
def share_instance_access_get(context, access_id, instance_id,
with_share_access_data=True):
"""Get access rule mapping for share instance."""
return IMPL.share_instance_access_get(
context, access_id, instance_id,
with_share_access_data=with_share_access_data)
def share_instance_access_update(context, access_id, instance_id, updates):
"""Update the access mapping row for a given share instance and access."""
return IMPL.share_instance_access_update(
context, access_id, instance_id, updates)
def share_instance_access_delete(context, mapping_id):
"""Deny access to share instance."""
return IMPL.share_instance_access_delete(context, mapping_id)
####################
def share_snapshot_instance_update(context, instance_id, values):
"""Set the given properties on a share snapshot instance and update it.
Raises NotFound if snapshot instance does not exist.
"""
return IMPL.share_snapshot_instance_update(context, instance_id, values)
def share_snapshot_instance_create(context, snapshot_id, values):
"""Create a share snapshot instance for an existing snapshot."""
return IMPL.share_snapshot_instance_create(
context, snapshot_id, values)
def share_snapshot_instance_get(context, instance_id, with_share_data=False):
"""Get a snapshot instance or raise a NotFound exception."""
return IMPL.share_snapshot_instance_get(
context, instance_id, with_share_data=with_share_data)
def share_snapshot_instance_get_all_with_filters(context, filters,
with_share_data=False):
"""Get all snapshot instances satisfying provided filters."""
return IMPL.share_snapshot_instance_get_all_with_filters(
context, filters, with_share_data=with_share_data)
def share_snapshot_instance_delete(context, snapshot_instance_id):
"""Delete a share snapshot instance."""
return IMPL.share_snapshot_instance_delete(context, snapshot_instance_id)
####################
def share_snapshot_create(context, values):
"""Create a snapshot from the values dictionary."""
return IMPL.share_snapshot_create(context, values)
def share_snapshot_get(context, snapshot_id):
"""Get a snapshot or raise if it does not exist."""
return IMPL.share_snapshot_get(context, snapshot_id)
def share_snapshot_get_all(context, filters=None, sort_key=None,
sort_dir=None):
"""Get all snapshots."""
return IMPL.share_snapshot_get_all(
context, filters=filters, sort_key=sort_key, sort_dir=sort_dir,
)
def share_snapshot_get_all_by_project(context, project_id, filters=None,
sort_key=None, sort_dir=None):
"""Get all snapshots belonging to a project."""
return IMPL.share_snapshot_get_all_by_project(
context, project_id, filters=filters, sort_key=sort_key,
sort_dir=sort_dir,
)
def share_snapshot_get_all_for_share(context, share_id, filters=None,
sort_key=None, sort_dir=None):
"""Get all snapshots for a share."""
return IMPL.share_snapshot_get_all_for_share(
context, share_id, filters=filters, sort_key=sort_key,
sort_dir=sort_dir,
)
def share_snapshot_get_latest_for_share(context, share_id):
"""Get the most recent snapshot for a share."""
return IMPL.share_snapshot_get_latest_for_share(context, share_id)
def share_snapshot_update(context, snapshot_id, values):
"""Set the given properties on an snapshot and update it.
Raises NotFound if snapshot does not exist.
"""
return IMPL.share_snapshot_update(context, snapshot_id, values)
###################
def share_snapshot_access_create(context, values):
"""Create a share snapshot access from the values dictionary."""
return IMPL.share_snapshot_access_create(context, values)
def share_snapshot_access_get(context, access_id):
"""Get share snapshot access rule from given access_id."""
return IMPL.share_snapshot_access_get(context, access_id)
def share_snapshot_access_get_all_for_snapshot_instance(
context, snapshot_instance_id, session=None):
"""Get all access rules related to a certain snapshot instance."""
return IMPL.share_snapshot_access_get_all_for_snapshot_instance(
context, snapshot_instance_id, session)
def share_snapshot_access_get_all_for_share_snapshot(context,
share_snapshot_id,
filters):
"""Get all access rules for a given share snapshot according to filters."""
return IMPL.share_snapshot_access_get_all_for_share_snapshot(
context, share_snapshot_id, filters)
def share_snapshot_export_locations_get(context, snapshot_id):
"""Get all export locations for a given share snapshot."""
return IMPL.share_snapshot_export_locations_get(context, snapshot_id)
def share_snapshot_instance_access_update(
context, access_id, instance_id, updates):
"""Update the state of the share snapshot instance access."""
return IMPL.share_snapshot_instance_access_update(
context, access_id, instance_id, updates)
def share_snapshot_instance_access_get(context, share_snapshot_instance_id,
access_id):
"""Get the share snapshot instance access related to given ids."""
return IMPL.share_snapshot_instance_access_get(
context, share_snapshot_instance_id, access_id)
def share_snapshot_instance_access_delete(context, access_id,
snapshot_instance_id):
"""Delete share snapshot instance access given its id."""
return IMPL.share_snapshot_instance_access_delete(
context, access_id, snapshot_instance_id)
def share_snapshot_instance_export_location_create(context, values):
"""Create a share snapshot instance export location."""
return IMPL.share_snapshot_instance_export_location_create(context, values)
def share_snapshot_instance_export_locations_get_all(
context, share_snapshot_instance_id):
"""Get the share snapshot instance export locations for given id."""
return IMPL.share_snapshot_instance_export_locations_get_all(
context, share_snapshot_instance_id)
def share_snapshot_instance_export_location_get(context, el_id):
"""Get the share snapshot instance export location for given id."""
return IMPL.share_snapshot_instance_export_location_get(
context, el_id)
def share_snapshot_instance_export_location_delete(context, el_id):
"""Delete share snapshot instance export location given its id."""
return IMPL.share_snapshot_instance_export_location_delete(context, el_id)
###################
def security_service_create(context, values):
"""Create security service DB record."""
return IMPL.security_service_create(context, values)
def security_service_delete(context, id):
"""Delete security service DB record."""
return IMPL.security_service_delete(context, id)
def security_service_update(context, id, values):
"""Update security service DB record."""
return IMPL.security_service_update(context, id, values)
def security_service_get(context, id):
"""Get security service DB record."""
return IMPL.security_service_get(context, id)
def security_service_get_all(context):
"""Get all security service DB records."""
return IMPL.security_service_get_all(context)
def security_service_get_all_by_project(context, project_id):
"""Get all security service DB records for the given project."""
return IMPL.security_service_get_all_by_project(context, project_id)
####################
def share_metadata_get(context, share_id):
"""Get all metadata for a share."""
return IMPL.share_metadata_get(context, share_id)
def share_metadata_delete(context, share_id, key):
"""Delete the given metadata item."""
IMPL.share_metadata_delete(context, share_id, key)
def share_metadata_update(context, share, metadata, delete):
"""Update metadata if it exists, otherwise create it."""
IMPL.share_metadata_update(context, share, metadata, delete)
###################
def share_export_location_get_by_uuid(context, export_location_uuid):
"""Get specific export location of a share."""
return IMPL.share_export_location_get_by_uuid(
context, export_location_uuid)
def share_export_locations_get(context, share_id):
"""Get all export locations of a share."""
return IMPL.share_export_locations_get(context, share_id)
def share_export_locations_get_by_share_id(context, share_id,
include_admin_only=True,
ignore_migration_destination=False):
"""Get all export locations of a share by its ID."""
return IMPL.share_export_locations_get_by_share_id(
context, share_id, include_admin_only=include_admin_only,
ignore_migration_destination=ignore_migration_destination)
def share_export_locations_get_by_share_instance_id(context,
share_instance_id):
"""Get all export locations of a share instance by its ID."""
return IMPL.share_export_locations_get_by_share_instance_id(
context, share_instance_id)
def share_export_locations_update(context, share_instance_id, export_locations,
delete=True):
"""Update export locations of a share instance."""
return IMPL.share_export_locations_update(
context, share_instance_id, export_locations, delete)
####################
def export_location_metadata_get(context, export_location_uuid, session=None):
"""Get all metadata of an export location."""
return IMPL.export_location_metadata_get(
context, export_location_uuid, session=session)
def export_location_metadata_delete(context, export_location_uuid, keys,
session=None):
"""Delete metadata of an export location."""
return IMPL.export_location_metadata_delete(
context, export_location_uuid, keys, session=session)
def export_location_metadata_update(context, export_location_uuid, metadata,
delete, session=None):
"""Update metadata of an export location."""
return IMPL.export_location_metadata_update(
context, export_location_uuid, metadata, delete, session=session)
####################
def share_network_create(context, values):
"""Create a share network DB record."""
return IMPL.share_network_create(context, values)
def share_network_delete(context, id):
"""Delete a share network DB record."""
return IMPL.share_network_delete(context, id)
def share_network_update(context, id, values):
"""Update a share network DB record."""
return IMPL.share_network_update(context, id, values)
def share_network_get(context, id):
"""Get requested share network DB record."""
return IMPL.share_network_get(context, id)
def share_network_get_all(context):
"""Get all share network DB records."""
return IMPL.share_network_get_all(context)
def share_network_get_all_by_project(context, project_id):
"""Get all share network DB records for the given project."""
return IMPL.share_network_get_all_by_project(context, project_id)
def share_network_get_all_by_security_service(context, security_service_id):
"""Get all share network DB records for the given project."""
return IMPL.share_network_get_all_by_security_service(
context, security_service_id)
def share_network_add_security_service(context, id, security_service_id):
return IMPL.share_network_add_security_service(context,
id,
security_service_id)
def share_network_remove_security_service(context, id, security_service_id):
return IMPL.share_network_remove_security_service(context,
id,
security_service_id)
##################
def network_allocation_create(context, values):
"""Create a network allocation DB record."""
return IMPL.network_allocation_create(context, values)
def network_allocation_delete(context, id):
"""Delete a network allocation DB record."""
return IMPL.network_allocation_delete(context, id)
def network_allocation_update(context, id, values):
"""Update a network allocation DB record."""
return IMPL.network_allocation_update(context, id, values)
def network_allocations_get_for_share_server(context, share_server_id,
session=None, label=None):
"""Get network allocations for share server."""
return IMPL.network_allocations_get_for_share_server(
context, share_server_id, label=label, session=session)
def network_allocations_get_by_ip_address(context, ip_address):
"""Get network allocations by IP address."""
return IMPL.network_allocations_get_by_ip_address(context, ip_address)
##################
def share_server_create(context, values):
"""Create share server DB record."""
return IMPL.share_server_create(context, values)
def share_server_delete(context, id):
"""Delete share server DB record."""
return IMPL.share_server_delete(context, id)
def share_server_update(context, id, values):
"""Update share server DB record."""
return IMPL.share_server_update(context, id, values)
def share_server_get(context, id, session=None):
"""Get share server DB record by ID."""
return IMPL.share_server_get(context, id, session=session)
def share_server_get_all_by_host_and_share_net_valid(context, host,
share_net_id,
session=None):
"""Get share server DB records by host and share net not error."""
return IMPL.share_server_get_all_by_host_and_share_net_valid(
context, host, share_net_id, session=session)
def share_server_get_all(context):
"""Get all share server DB records."""
return IMPL.share_server_get_all(context)
def share_server_get_all_by_host(context, host):
"""Get all share servers related to particular host."""
return IMPL.share_server_get_all_by_host(context, host)
def share_server_get_all_unused_deletable(context, host, updated_before):
"""Get all free share servers DB records."""
return IMPL.share_server_get_all_unused_deletable(context, host,
updated_before)
def share_server_backend_details_set(context, share_server_id, server_details):
"""Create DB record with backend details."""
return IMPL.share_server_backend_details_set(context, share_server_id,
server_details)
##################
def share_type_create(context, values, projects=None):
"""Create a new share type."""
return IMPL.share_type_create(context, values, projects)
def share_type_get_all(context, inactive=False, filters=None):
"""Get all share types.
:param context: context to query under
    :param inactive: Include inactive share types in the result set
:param filters: Filters for the query in the form of key/value.
:is_public: Filter share types based on visibility:
* **True**: List public share types only
* **False**: List private share types only
* **None**: List both public and private share types
:returns: list of matching share types
"""
return IMPL.share_type_get_all(context, inactive, filters)
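# Illustration only (not part of the upstream module): filters is a plain
# dict.  For example, assuming an admin request context named ctxt, listing
# only publicly visible share types could look like:
#
#     public_types = share_type_get_all(ctxt, filters={'is_public': True})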
def share_type_get(context, type_id, inactive=False, expected_fields=None):
"""Get share type by id.
:param context: context to query under
:param type_id: share type id to get.
:param inactive: Consider inactive share types when searching
:param expected_fields: Return those additional fields.
Supported fields are: projects.
:returns: share type
"""
return IMPL.share_type_get(context, type_id, inactive, expected_fields)
def share_type_get_by_name(context, name):
"""Get share type by name."""
return IMPL.share_type_get_by_name(context, name)
def share_type_get_by_name_or_id(context, name_or_id):
"""Get share type by name or ID and return None if not found."""
return IMPL.share_type_get_by_name_or_id(context, name_or_id)
def share_type_access_get_all(context, type_id):
"""Get all share type access of a share type."""
return IMPL.share_type_access_get_all(context, type_id)
def share_type_access_add(context, type_id, project_id):
"""Add share type access for project."""
return IMPL.share_type_access_add(context, type_id, project_id)
def share_type_access_remove(context, type_id, project_id):
"""Remove share type access for project."""
return IMPL.share_type_access_remove(context, type_id, project_id)
def share_type_destroy(context, id):
"""Delete a share type."""
return IMPL.share_type_destroy(context, id)
####################
def share_type_extra_specs_get(context, share_type_id):
"""Get all extra specs for a share type."""
return IMPL.share_type_extra_specs_get(context, share_type_id)
def share_type_extra_specs_delete(context, share_type_id, key):
"""Delete the given extra specs item."""
return IMPL.share_type_extra_specs_delete(context, share_type_id, key)
def share_type_extra_specs_update_or_create(context, share_type_id,
extra_specs):
"""Create or update share type extra specs.
This adds or modifies the key/value pairs specified in the extra
specs dict argument.
"""
return IMPL.share_type_extra_specs_update_or_create(context,
share_type_id,
extra_specs)
def driver_private_data_get(context, entity_id, key=None, default=None):
"""Get one, list or all key-value pairs for given entity_id."""
return IMPL.driver_private_data_get(context, entity_id, key, default)
def driver_private_data_update(context, entity_id, details,
delete_existing=False):
"""Update key-value pairs for given entity_id."""
return IMPL.driver_private_data_update(context, entity_id, details,
delete_existing)
def driver_private_data_delete(context, entity_id, key=None):
"""Remove one, list or all key-value pairs for given entity_id."""
return IMPL.driver_private_data_delete(context, entity_id, key)
####################
def availability_zone_get(context, id_or_name):
"""Get availability zone by name or id."""
return IMPL.availability_zone_get(context, id_or_name)
def availability_zone_get_all(context):
"""Get all active availability zones."""
return IMPL.availability_zone_get_all(context)
####################
def share_group_get(context, share_group_id):
"""Get a share group or raise if it does not exist."""
return IMPL.share_group_get(context, share_group_id)
def share_group_get_all(context, detailed=True, filters=None, sort_key=None,
sort_dir=None):
"""Get all share groups."""
return IMPL.share_group_get_all(
context, detailed=detailed, filters=filters, sort_key=sort_key,
sort_dir=sort_dir)
def share_group_get_all_by_host(context, host, detailed=True, filters=None,
sort_key=None, sort_dir=None):
"""Get all share groups belonging to a host."""
return IMPL.share_group_get_all_by_host(
context, host, detailed=detailed, filters=filters, sort_key=sort_key,
sort_dir=sort_dir)
def share_group_create(context, values):
"""Create a share group from the values dictionary."""
return IMPL.share_group_create(context, values)
def share_group_get_all_by_share_server(context, share_server_id,
filters=None, sort_key=None,
sort_dir=None):
"""Get all share groups associated with a share server."""
return IMPL.share_group_get_all_by_share_server(
context, share_server_id, filters=filters, sort_key=sort_key,
sort_dir=sort_dir)
def share_group_get_all_by_project(context, project_id, detailed=True,
filters=None, sort_key=None,
sort_dir=None):
"""Get all share groups belonging to a project."""
return IMPL.share_group_get_all_by_project(
context, project_id, detailed=detailed, filters=filters,
sort_key=sort_key, sort_dir=sort_dir)
def share_group_update(context, share_group_id, values):
"""Set the given properties on a share group and update it.
Raises NotFound if share group does not exist.
"""
return IMPL.share_group_update(context, share_group_id, values)
def share_group_destroy(context, share_group_id):
"""Destroy the share group or raise if it does not exist."""
return IMPL.share_group_destroy(context, share_group_id)
def count_shares_in_share_group(context, share_group_id):
"""Returns the number of undeleted shares with the specified group."""
return IMPL.count_shares_in_share_group(context, share_group_id)
def get_all_shares_by_share_group(context, share_group_id):
return IMPL.get_all_shares_by_share_group(context, share_group_id)
def count_share_group_snapshots_in_share_group(context, share_group_id):
"""Returns the number of sg snapshots with the specified share group."""
return IMPL.count_share_group_snapshots_in_share_group(
context, share_group_id)
def count_share_groups_in_share_network(context, share_network_id,
session=None):
"""Return the number of groups with the specified share network."""
return IMPL.count_share_groups_in_share_network(context, share_network_id)
def count_share_group_snapshot_members_in_share(context, share_id,
session=None):
"""Returns the number of group snapshot members linked to the share."""
return IMPL.count_share_group_snapshot_members_in_share(context, share_id)
def share_group_snapshot_get(context, share_group_snapshot_id):
"""Get a share group snapshot."""
return IMPL.share_group_snapshot_get(context, share_group_snapshot_id)
def share_group_snapshot_get_all(context, detailed=True, filters=None,
sort_key=None, sort_dir=None):
"""Get all share group snapshots."""
return IMPL.share_group_snapshot_get_all(
context, detailed=detailed, filters=filters, sort_key=sort_key,
sort_dir=sort_dir)
def share_group_snapshot_get_all_by_project(context, project_id, detailed=True,
filters=None, sort_key=None,
sort_dir=None):
"""Get all share group snapshots belonging to a project."""
return IMPL.share_group_snapshot_get_all_by_project(
context, project_id, detailed=detailed, filters=filters,
sort_key=sort_key, sort_dir=sort_dir)
def share_group_snapshot_create(context, values):
"""Create a share group snapshot from the values dictionary."""
return IMPL.share_group_snapshot_create(context, values)
def share_group_snapshot_update(context, share_group_snapshot_id, values):
"""Set the given properties on a share group snapshot and update it.
Raises NotFound if share group snapshot does not exist.
"""
return IMPL.share_group_snapshot_update(
context, share_group_snapshot_id, values)
def share_group_snapshot_destroy(context, share_group_snapshot_id):
"""Destroy the share_group_snapshot or raise if it does not exist."""
return IMPL.share_group_snapshot_destroy(context, share_group_snapshot_id)
def share_group_snapshot_members_get_all(context, share_group_snapshot_id):
"""Return the members of a share group snapshot."""
return IMPL.share_group_snapshot_members_get_all(
context, share_group_snapshot_id)
def share_group_snapshot_member_create(context, values):
"""Create a share group snapshot member from the values dictionary."""
return IMPL.share_group_snapshot_member_create(context, values)
def share_group_snapshot_member_update(context, member_id, values):
"""Set the given properties on a share group snapshot member and update it.
Raises NotFound if share_group_snapshot member does not exist.
"""
return IMPL.share_group_snapshot_member_update(context, member_id, values)
####################
def share_replicas_get_all(context, with_share_server=False,
with_share_data=False):
"""Returns all share replicas regardless of share."""
return IMPL.share_replicas_get_all(
context, with_share_server=with_share_server,
with_share_data=with_share_data)
def share_replicas_get_all_by_share(context, share_id, with_share_server=False,
with_share_data=False):
"""Returns all share replicas for a given share."""
return IMPL.share_replicas_get_all_by_share(
context, share_id, with_share_server=with_share_server,
with_share_data=with_share_data)
def share_replicas_get_available_active_replica(context, share_id,
with_share_server=False,
with_share_data=False):
"""Returns an active replica for a given share."""
return IMPL.share_replicas_get_available_active_replica(
context, share_id, with_share_server=with_share_server,
with_share_data=with_share_data)
def share_replica_get(context, replica_id, with_share_server=False,
with_share_data=False):
"""Get share replica by id."""
return IMPL.share_replica_get(
context, replica_id, with_share_server=with_share_server,
with_share_data=with_share_data)
def share_replica_update(context, share_replica_id, values,
with_share_data=False):
"""Updates a share replica with given values."""
return IMPL.share_replica_update(context, share_replica_id, values,
with_share_data=with_share_data)
def share_replica_delete(context, share_replica_id):
"""Deletes a share replica."""
return IMPL.share_replica_delete(context, share_replica_id)
def purge_deleted_records(context, age_in_days):
"""Purge deleted rows older than given age from all tables
:raises: InvalidParameterValue if age_in_days is incorrect.
"""
return IMPL.purge_deleted_records(context, age_in_days=age_in_days)
####################
def share_group_type_create(context, values, projects=None):
"""Create a new share group type."""
return IMPL.share_group_type_create(context, values, projects)
def share_group_type_get_all(context, inactive=False, filters=None):
"""Get all share group types.
:param context: context to query under
    :param inactive: Include inactive share group types in the result set
:param filters: Filters for the query in the form of key/value.
:is_public: Filter share group types based on visibility:
* **True**: List public group types only
* **False**: List private group types only
* **None**: List both public and private group types
:returns: list of matching share group types
"""
return IMPL.share_group_type_get_all(context, inactive, filters)
def share_group_type_get(context, type_id, inactive=False,
expected_fields=None):
"""Get share_group type by id.
:param context: context to query under
:param type_id: group type id to get.
:param inactive: Consider inactive group types when searching
:param expected_fields: Return those additional fields.
Supported fields are: projects.
:returns: share group type
"""
return IMPL.share_group_type_get(
context, type_id, inactive, expected_fields)
def share_group_type_get_by_name(context, name):
"""Get share group type by name."""
return IMPL.share_group_type_get_by_name(context, name)
def share_group_type_access_get_all(context, type_id):
"""Get all share group type access of a share group type."""
return IMPL.share_group_type_access_get_all(context, type_id)
def share_group_type_access_add(context, type_id, project_id):
"""Add share group type access for project."""
return IMPL.share_group_type_access_add(context, type_id, project_id)
def share_group_type_access_remove(context, type_id, project_id):
"""Remove share group type access for project."""
return IMPL.share_group_type_access_remove(context, type_id, project_id)
def share_group_type_destroy(context, type_id):
"""Delete a share group type."""
return IMPL.share_group_type_destroy(context, type_id)
def share_group_type_specs_get(context, type_id):
"""Get all group specs for a share group type."""
return IMPL.share_group_type_specs_get(context, type_id)
def share_group_type_specs_delete(context, type_id, key):
"""Delete the given group specs item."""
return IMPL.share_group_type_specs_delete(context, type_id, key)
def share_group_type_specs_update_or_create(context, type_id, group_specs):
"""Create or update share group type specs.
This adds or modifies the key/value pairs specified in the group
specs dict argument.
"""
return IMPL.share_group_type_specs_update_or_create(
context, type_id, group_specs)
avg_line_length: 36.025357 | max_line_length: 79 | alphanum_fraction: 0.703942
hexsha: 16a7d8514eedbe4bb8befe8ce33d4255d7a8e120 | size: 8,284 | ext: py | lang: Python
repo_path: usr/local/lib/python3.6/dist-packages/pymux/stream.py | repo_name: threefoldtech/threebot_prebuilt | repo_head_hexsha: 1f0e1c65c14cef079cd80f73927d7c8318755c48 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: 1 (issue events: 2018-04-04T12:13:40.000Z to 2018-05-03T07:57:52.000Z) | max_forks_count: null (the repo path, name, head hexsha, and licenses are identical across the stars, issues, and forks columns)
"""
Improvements on Pyte.
"""
from __future__ import unicode_literals
from pyte.streams import Stream
from pyte.escape import NEL
from pyte import control as ctrl
from collections import defaultdict
from .log import logger
import re
__all__ = (
'BetterStream',
)
class BetterStream(Stream):
"""
Extension to the Pyte `Stream` class that also handles "Esc]<num>...BEL"
sequences. This is used by xterm to set the terminal title.
"""
escape = Stream.escape.copy()
escape.update({
# Call next_line instead of line_feed. We always want to go to the left
# margin if we receive this, unlike \n, which goes one row down.
# (Except when LNM has been set.)
NEL: "next_line",
})
def __init__(self, screen):
super(BetterStream, self).__init__()
self.listener = screen
self._validate_screen()
# Create a regular expression pattern that matches everything what can
# be considered plain text. This can be used as a very simple lexer
# that can feed the "plain text part" as one token into the screen.
special = set([ctrl.ESC, ctrl.CSI, ctrl.NUL, ctrl.DEL]) | set(self.basic)
self._text_search = re.compile(
'[^%s]+' % ''.join(re.escape(c) for c in special)).match
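        # Illustration only (not part of the original file): the pattern above
        # matches the longest run of characters that are neither escape
        # introducers nor basic control characters.  Feeding a chunk such as
        # 'hello\x1b[1mworld' therefore lets the lexer hand 'hello' to the
        # screen in one call before the parser takes over at the ESC byte.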
# Start parser.
self._parser = self._parser_generator()
self._taking_plain_text = self._parser.send(None)
self._send = self._parser.send
def _validate_screen(self):
"""
Check whether our Screen class has all the required callbacks.
(We want to verify this statically, before feeding content to the
screen.)
"""
for d in [self.basic, self.escape, self.sharp, self.percent, self.csi]:
for name in d.values():
assert hasattr(self.listener, name), 'Screen is missing %r' % name
def feed(self, chars):
"""
Custom, much more efficient 'feed' function.
Feed a string of characters to the parser.
"""
# The original implementation of this function looked like this::
#
# for c in chars:
# self._send(c)
#
# However, the implementation below does a big optimization. If the
# parser is possibly expecting a chunk of text (when it is not inside a
# ESC or CSI escape sequence), then we send that fragment directly to
# the 'draw' method of the screen.
# Local copy of functions. (For faster lookups.)
send = self._send
taking_plain_text = self._taking_plain_text
text_search = self._text_search
draw = self.listener.draw
# Loop through the chars.
i = 0
count = len(chars)
while i < count:
# Reading plain text? Don't send characters one by one in the
# generator, but optimize and send the whole chunk without
# escapes directly to the listener.
if taking_plain_text:
match = text_search(chars, i)
if match:
start, i = match.span()
draw(chars[start:i])
else:
taking_plain_text = False
# The parser expects just one character now. Just send the next one.
else:
taking_plain_text = send(chars[i])
i += 1
# Remember state for the next 'feed()'.
self._taking_plain_text = taking_plain_text
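    # Illustration only (a hedged sketch, not from the original file):
    #
    #     stream = BetterStream(screen)
    #     stream.feed('ls -l\x1b[31mred text\x1b[0m\n')
    #
    # 'ls -l' and 'red text' reach screen.draw() as single chunks via the fast
    # path above, while the two SGR sequences and the final newline are fed to
    # the coroutine one character at a time.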
def _parser_generator(self):
"""
Coroutine that processes VT100 output.
It's actually a state machine, implemented as a coroutine. So all the
'state' that we have is stored in local variables.
This generator is not the most beautiful, but it is as performant as
possible. When a process generates a lot of output (That is often much
more than a person would give as input), then this will be the
bottleneck, because it processes just one character at a time.
We did many manual optimizations to this function in order to make it
as efficient as possible. Don't change anything without profiling
first.
"""
listener = self.listener
basic = self.basic
escape = self.escape
sharp = self.sharp
percent = self.percent
csi = self.csi
ESC = ctrl.ESC
CSI = ctrl.CSI
CTRL_SEQUENCES_ALLOWED_IN_CSI = set([
ctrl.BEL, ctrl.BS, ctrl.HT, ctrl.LF, ctrl.VT, ctrl.FF, ctrl.CR])
def create_dispatch_dictionary(source_dict):
# In order to avoid getting KeyError exceptions below, we make sure
# that these dictionaries have a dummy handler.
def dummy(*a, **kw):
pass
return defaultdict(
lambda: dummy,
dict((event, getattr(listener, attr)) for event, attr in source_dict.items()))
basic_dispatch = create_dispatch_dictionary(basic)
sharp_dispatch = create_dispatch_dictionary(sharp)
percent_dispatch = create_dispatch_dictionary(percent)
escape_dispatch = create_dispatch_dictionary(escape)
csi_dispatch = create_dispatch_dictionary(csi)
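        # Illustration only (not from the original file): because the dispatch
        # tables are defaultdicts, an escape or CSI final byte that pyte has
        # no handler for (for example csi_dispatch['p']) resolves to the dummy
        # no-op, so unrecognised sequences are swallowed instead of raising
        # KeyError.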
while True:
char = yield True # (`True` tells the 'send()' function that it
# is allowed to send chunks of plain text
# directly to the listener, instead of this generator.)
if char == ESC: # \x1b
char = yield
if char == '[':
char = CSI # Go to CSI.
else:
if char == '#':
sharp_dispatch[(yield)]()
elif char == '%':
percent_dispatch[(yield)]()
elif char in '()':
listener.set_charset((yield), mode=char)
elif char == ']':
data = []
while True:
c = yield
if c == '\07':
break
else:
data.append(c)
listener.square_close(''.join(data))
else:
escape_dispatch[char]()
continue # Do not go to CSI.
if char in basic: # 'if', not 'elif', because we need to be
# able to jump here from Esc[ above in the CSI
# section below.
basic_dispatch[char]()
elif char == CSI: # \x9b
current = ''
params = []
private = False
while True:
char = yield
if char == '?':
private = True
elif char in CTRL_SEQUENCES_ALLOWED_IN_CSI:
basic_dispatch[char]()
elif char in (ctrl.SP, '>'):
# Ignore '>' because of 'ESC[>c' (Send device attributes.)
pass
elif char.isdigit():
current += char
else:
params.append(min(int(current or 0), 9999))
if char == ';':
current = ''
else:
try:
if private:
csi_dispatch[char](*params, private=True)
else:
csi_dispatch[char](*params)
except TypeError:
# Handler doesn't take params or private attribute.
# (Not the cleanest way to handle this, but
# it's safe and performant enough.)
                                logger.warning('Dispatch %s failed. params=%s, private=%s',
                                               char, params, private)
break # Break outside CSI loop.
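    # Illustration only (not part of the original file): for a sequence such
    # as '\x1b[2;10H' the CSI loop above accumulates params == [2, 10] and
    # then calls csi_dispatch['H'](2, 10), which pyte's default csi table maps
    # to the screen's cursor_position handler.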
avg_line_length: 37.826484 | max_line_length: 94 | alphanum_fraction: 0.516176
hexsha: b20dea5cc5b5ab522587da64fa1028aabc5d1f39 | size: 698 | ext: py | lang: Python
repo_path: Q153FindMinimuminRotatedSortedArray.py | repo_name: ChenliangLi205/LeetCode | repo_head_hexsha: 6c547c338eb05042cb68f57f737dce483964e2fd | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null (all event datetimes: null; the repo path, name, head hexsha, and licenses are identical across the stars, issues, and forks columns)
class Solution(object):
def findMin(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
if len(nums) == 0:
return 0
if len(nums) == 1:
return nums[0]
        if nums[-1] > nums[0]:
            return nums[0]
        if nums[-1] < nums[-2]:
            return nums[-1]
        lastPtr = len(nums) - 1
        firstPtr = 0
        while lastPtr >= firstPtr:
            if nums[firstPtr] > nums[firstPtr + 1]:
                return nums[firstPtr + 1]
            if nums[lastPtr] < nums[lastPtr - 1]:
                return nums[lastPtr]
            lastPtr -= 1
            firstPtr += 1
return 0
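# Illustration only, not part of the original solution file: the scan above is
# O(n) in the worst case.  Assuming the values are distinct (as LeetCode 153
# guarantees), the minimum can also be found in O(log n) by comparing the
# midpoint against the right endpoint:
def find_min_binary_search(nums):
    """Hedged alternative sketch: minimum of a rotated sorted array."""
    if not nums:
        return 0  # mirror the empty-input behaviour of the solution above
    lo, hi = 0, len(nums) - 1
    while lo < hi:
        mid = (lo + hi) // 2
        if nums[mid] > nums[hi]:
            lo = mid + 1  # the minimum lies strictly to the right of mid
        else:
            hi = mid      # the minimum is at mid or somewhere to its left
    return nums[lo]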
avg_line_length: 26.846154 | max_line_length: 50 | alphanum_fraction: 0.429799
hexsha: 90997ba5d0f48bfd02438dbd1da28f9572c546eb | size: 19,535 | ext: py | lang: Python
repo_path: src/lib/opts_deploy.py | repo_name: gordonjun2/CenterNet-1 | repo_head_hexsha: 82c8fcf30faf9ea5bbbf84dda55ff57db3f089b5 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null (all event datetimes: null; the repo path, name, head hexsha, and licenses are identical across the stars, issues, and forks columns)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import sys
class opts(object):
def __init__(self):
self.parser = argparse.ArgumentParser()
# basic experiment setting
self.parser.add_argument('--task', default='ctdet_drone',
help='ctdet | ddd | multi_pose | exdet')
self.parser.add_argument('--dataset', default='visdrone',
help='coco | kitti | coco_hp | pascal | coco_tensorboard_added | pascal_coco_tensorboard_added')
self.parser.add_argument('--exp_id', default='default')
self.parser.add_argument('--test', action='store_true')
self.parser.add_argument('--debug', type=int, default=0,
                             help='level of visualization. '
                                  '1: only show the final detection results; '
                                  '2: show the network output features; '
                                  '3: use matplotlib to display; '  # useful when launching training from an IPython notebook
                                  '4: save all visualizations to disk')
self.parser.add_argument('--demo', default='',
help='path to image/ image folders/ video. '
'or "webcam"')
self.parser.add_argument('--load_model', default='',
help='path to pretrained model')
self.parser.add_argument('--resume', action='store_true',
help='resume an experiment. '
'Reloaded the optimizer parameter and '
'set load_model to model_last.pth '
'in the exp dir if load_model is empty.')
# system
self.parser.add_argument('--gpus', default='0',
help='-1 for CPU, use comma for multiple gpus')
self.parser.add_argument('--num_workers', type=int, default=4,
help='dataloader threads. 0 for single-thread.')
self.parser.add_argument('--not_cuda_benchmark', action='store_true',
help='disable when the input size is not fixed.')
self.parser.add_argument('--seed', type=int, default=317,
help='random seed') # from CornerNet
# log
self.parser.add_argument('--print_iter', type=int, default=0,
help='disable progress bar and print to screen.')
self.parser.add_argument('--hide_data_time', action='store_true',
help='not display time during training.')
self.parser.add_argument('--save_all', action='store_true',
help='save model to disk every 5 epochs.')
self.parser.add_argument('--metric', default='loss',
help='main metric to save best model')
self.parser.add_argument('--vis_thresh', type=float, default=0.3,
help='visualization threshold.')
self.parser.add_argument('--debugger_theme', default='white',
choices=['white', 'black'])
# model
self.parser.add_argument('--arch', default='dla_34',
                             help='model architecture. Currently tested: '
                                  'res_18 | res_101 | resdcn_18 | resdcn_101 | '
                                  'dlav0_34 | dla_34 | hourglass')
self.parser.add_argument('--head_conv', type=int, default=-1,
                             help='conv layer channels for output head; '
                                  '0 for no conv layer; '
                                  '-1 for default setting: '
                                  '64 for resnets and 256 for dla.')
self.parser.add_argument('--down_ratio', type=int, default=4,
help='output stride. Currently only supports 4.')
# input
self.parser.add_argument('--input_res', type=int, default=-1,
help='input height and width. -1 for default from '
                                  'dataset. Will be overridden by input_h | input_w')
self.parser.add_argument('--input_h', type=int, default=-1,
help='input height. -1 for default from dataset.')
self.parser.add_argument('--input_w', type=int, default=-1,
help='input width. -1 for default from dataset.')
# train
self.parser.add_argument('--lr', type=float, default=1.25e-4,
help='learning rate for batch size 32.')
self.parser.add_argument('--lr_step', type=str, default='90,120',
help='drop learning rate by 10.')
self.parser.add_argument('--num_epochs', type=int, default=140,
help='total training epochs.')
self.parser.add_argument('--batch_size', type=int, default=32,
help='batch size')
self.parser.add_argument('--master_batch_size', type=int, default=-1,
help='batch size on the master gpu.')
self.parser.add_argument('--num_iters', type=int, default=-1,
help='default: #samples / batch_size.')
self.parser.add_argument('--val_intervals', type=int, default=1,
help='number of epochs to run validation.')
self.parser.add_argument('--trainval', action='store_true',
help='include validation in training and '
'test on test set')
self.parser.add_argument("--es", dest="es", default=True, type=bool)
# test
self.parser.add_argument('--flip_test', action='store_true',
help='flip data augmentation.')
self.parser.add_argument('--test_scales', type=str, default='1',
help='multi scale test augmentation.')
self.parser.add_argument('--nms', action='store_true',
help='run nms in testing.')
self.parser.add_argument('--K', type=int, default=100,
help='max number of output objects.')
self.parser.add_argument('--not_prefetch_test', action='store_true',
                             help='not use parallel data pre-processing.')
self.parser.add_argument('--fix_res', action='store_true',
help='fix testing resolution or keep '
'the original resolution')
self.parser.add_argument('--keep_res', action='store_true',
help='keep the original resolution'
' during validation.')
# dataset
self.parser.add_argument('--not_rand_crop', action='store_true',
                             help='not use the random crop data augmentation '
                                  'from CornerNet.')
self.parser.add_argument('--shift', type=float, default=0.1,
                             help='when not using random crop, '
                                  'apply shift augmentation.')
self.parser.add_argument('--scale', type=float, default=0.4,
                             help='when not using random crop, '
                                  'apply scale augmentation.')
self.parser.add_argument('--rotate', type=float, default=0,
                             help='when not using random crop, '
                                  'apply rotation augmentation.')
self.parser.add_argument('--flip', type = float, default=0.5,
help='probability of applying flip augmentation.')
self.parser.add_argument('--no_color_aug', action='store_true',
                             help='not use the color augmentation '
'from CornerNet')
# multi_pose
self.parser.add_argument('--aug_rot', type=float, default=0,
help='probability of applying '
'rotation augmentation.')
# ddd
self.parser.add_argument('--aug_ddd', type=float, default=0.5,
help='probability of applying crop augmentation.')
self.parser.add_argument('--rect_mask', action='store_true',
help='for ignored object, apply mask on the '
'rectangular region or just center point.')
self.parser.add_argument('--kitti_split', default='3dop',
help='different validation split for kitti: '
'3dop | subcnn')
# loss
self.parser.add_argument('--mse_loss', action='store_true',
help='use mse loss or focal loss to train '
'keypoint heatmaps.')
# ctdet
self.parser.add_argument('--reg_loss', default='l1',
help='regression loss: sl1 | l1 | l2')
self.parser.add_argument('--hm_weight', type=float, default=1,
help='loss weight for keypoint heatmaps.')
self.parser.add_argument('--off_weight', type=float, default=1,
help='loss weight for keypoint local offsets.')
self.parser.add_argument('--wh_weight', type=float, default=0.1,
help='loss weight for bounding box size.')
# multi_pose
self.parser.add_argument('--hp_weight', type=float, default=1,
help='loss weight for human pose offset.')
self.parser.add_argument('--hm_hp_weight', type=float, default=1,
help='loss weight for human keypoint heatmap.')
# ddd
self.parser.add_argument('--dep_weight', type=float, default=1,
help='loss weight for depth.')
self.parser.add_argument('--dim_weight', type=float, default=1,
help='loss weight for 3d bounding box size.')
self.parser.add_argument('--rot_weight', type=float, default=1,
help='loss weight for orientation.')
self.parser.add_argument('--peak_thresh', type=float, default=0.2)
# task
# ctdet
self.parser.add_argument('--norm_wh', action='store_true',
                             help=r'L1(\hat(y) / y, 1) or L1(\hat(y), y)')
self.parser.add_argument('--dense_wh', action='store_true',
help='apply weighted regression near center or '
'just apply regression on center point.')
self.parser.add_argument('--cat_spec_wh', action='store_true',
help='category specific bounding box size.')
self.parser.add_argument('--not_reg_offset', action='store_true',
help='not regress local offset.')
# exdet
self.parser.add_argument('--agnostic_ex', action='store_true',
help='use category agnostic extreme points.')
self.parser.add_argument('--scores_thresh', type=float, default=0.1,
help='threshold for extreme point heatmap.')
self.parser.add_argument('--center_thresh', type=float, default=0.1,
help='threshold for centermap.')
self.parser.add_argument('--aggr_weight', type=float, default=0.0,
help='edge aggregation weight.')
# multi_pose
self.parser.add_argument('--dense_hp', action='store_true',
help='apply weighted pose regression near center '
'or just apply regression on center point.')
self.parser.add_argument('--not_hm_hp', action='store_true',
help='not estimate human joint heatmap, '
'directly use the joint offset from center.')
self.parser.add_argument('--not_reg_hp_offset', action='store_true',
help='not regress local offset for '
'human joint heatmaps.')
self.parser.add_argument('--not_reg_bbox', action='store_true',
                             help='not regress bounding box size.')
# ground truth validation
self.parser.add_argument('--eval_oracle_hm', action='store_true',
help='use ground center heatmap.')
self.parser.add_argument('--eval_oracle_wh', action='store_true',
help='use ground truth bounding box size.')
self.parser.add_argument('--eval_oracle_offset', action='store_true',
help='use ground truth local heatmap offset.')
self.parser.add_argument('--eval_oracle_kps', action='store_true',
help='use ground truth human pose offset.')
self.parser.add_argument('--eval_oracle_hmhp', action='store_true',
help='use ground truth human joint heatmaps.')
self.parser.add_argument('--eval_oracle_hp_offset', action='store_true',
help='use ground truth human joint local offset.')
self.parser.add_argument('--eval_oracle_dep', action='store_true',
help='use ground truth depth.')
def parse(self, args=''):
if args == '':
opt = self.parser.parse_args()
else:
opt = self.parser.parse_args(args)
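    # post-process parsed options: expand comma-separated strings into lists;
    # gpus_str keeps the raw device ids while gpus is remapped to local
    # ordinals 0..N-1 ([-1] selects CPU)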
opt.gpus_str = opt.gpus
opt.gpus = [int(gpu) for gpu in opt.gpus.split(',')]
opt.gpus = [i for i in range(len(opt.gpus))] if opt.gpus[0] >=0 else [-1]
opt.lr_step = [int(i) for i in opt.lr_step.split(',')]
opt.test_scales = [float(i) for i in opt.test_scales.split(',')]
opt.fix_res = not opt.keep_res
print('Fix size testing.' if opt.fix_res else 'Keep resolution testing.')
opt.reg_offset = not opt.not_reg_offset
opt.reg_bbox = not opt.not_reg_bbox
opt.hm_hp = not opt.not_hm_hp
opt.reg_hp_offset = (not opt.not_reg_hp_offset) and opt.hm_hp
if opt.head_conv == -1: # init default head_conv
opt.head_conv = 256 if 'dla' in opt.arch else 64
opt.pad = 127 if 'hourglass' in opt.arch else 31
opt.num_stacks = 2 if opt.arch == 'hourglass' else 1
if opt.trainval:
opt.val_intervals = 100000000
if opt.debug > 0:
opt.num_workers = 0
opt.batch_size = 1
opt.gpus = [opt.gpus[0]]
opt.master_batch_size = -1
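    # split the global batch across GPUs: the first (master) GPU takes
    # master_batch_size samples and the remainder is divided as evenly as
    # possible among the remaining GPUs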
if opt.master_batch_size == -1:
opt.master_batch_size = opt.batch_size // len(opt.gpus)
rest_batch_size = (opt.batch_size - opt.master_batch_size)
opt.chunk_sizes = [opt.master_batch_size]
for i in range(len(opt.gpus) - 1):
slave_chunk_size = rest_batch_size // (len(opt.gpus) - 1)
if i < rest_batch_size % (len(opt.gpus) - 1):
slave_chunk_size += 1
opt.chunk_sizes.append(slave_chunk_size)
print('training chunk_sizes:', opt.chunk_sizes)
opt.root_dir = os.path.join(os.path.dirname(__file__), '..', '..')
opt.data_dir = os.path.join(opt.root_dir, 'data')
opt.exp_dir = os.path.join(opt.root_dir, 'exp', opt.task)
opt.save_dir = os.path.join(opt.exp_dir, opt.exp_id)
opt.debug_dir = os.path.join(opt.save_dir, 'debug')
print('The output will be saved to ', opt.save_dir)
if opt.resume and opt.load_model == '':
model_path = opt.save_dir[:-4] if opt.save_dir.endswith('TEST') \
else opt.save_dir
opt.load_model = os.path.join(model_path, 'model_last.pth')
return opt
def update_dataset_info_and_set_heads(self, opt, dataset):
input_h, input_w = dataset.default_resolution
opt.mean, opt.std = dataset.mean, dataset.std
opt.num_classes = dataset.num_classes
# input_h(w): opt.input_h overrides opt.input_res overrides dataset default
input_h = opt.input_res if opt.input_res > 0 else input_h
input_w = opt.input_res if opt.input_res > 0 else input_w
opt.input_h = opt.input_h if opt.input_h > 0 else input_h
opt.input_w = opt.input_w if opt.input_w > 0 else input_w
opt.output_h = opt.input_h // opt.down_ratio
opt.output_w = opt.input_w // opt.down_ratio
opt.input_res = max(opt.input_h, opt.input_w)
opt.output_res = max(opt.output_h, opt.output_w)
if opt.task == 'exdet':
# assert opt.dataset in ['coco']
num_hm = 1 if opt.agnostic_ex else opt.num_classes
opt.heads = {'hm_t': num_hm, 'hm_l': num_hm,
'hm_b': num_hm, 'hm_r': num_hm,
'hm_c': opt.num_classes}
if opt.reg_offset:
opt.heads.update({'reg_t': 2, 'reg_l': 2, 'reg_b': 2, 'reg_r': 2})
elif opt.task == 'ddd':
# assert opt.dataset in ['gta', 'kitti', 'viper']
opt.heads = {'hm': opt.num_classes, 'dep': 1, 'rot': 8, 'dim': 3}
if opt.reg_bbox:
opt.heads.update(
{'wh': 2})
if opt.reg_offset:
opt.heads.update({'reg': 2})
elif opt.task == 'ctdet':
# assert opt.dataset in ['pascal', 'coco']
opt.heads = {'hm': opt.num_classes,
'wh': 2 if not opt.cat_spec_wh else 2 * opt.num_classes}
if opt.reg_offset:
opt.heads.update({'reg': 2})
elif opt.task == 'multi_pose':
# assert opt.dataset in ['coco_hp']
opt.flip_idx = dataset.flip_idx
opt.heads = {'hm': opt.num_classes, 'wh': 2, 'hps': 34}
if opt.reg_offset:
opt.heads.update({'reg': 2})
if opt.hm_hp:
opt.heads.update({'hm_hp': 17})
if opt.reg_hp_offset:
opt.heads.update({'hp_offset': 2})
elif opt.task == 'ctdet_drone':
opt.heads = {'hm': opt.num_classes,
'wh': 2 if not opt.cat_spec_wh else 2 * opt.num_classes}
if opt.reg_offset:
opt.heads.update({'reg': 2})
else:
assert 0, 'task not defined!'
print('heads', opt.heads)
return opt
def init(self, args=''):
default_dataset_info = {
# 'ctdet': {'default_resolution': [512, 512], 'num_classes': 20, # changed from 80 to 20
# 'mean': [0.408, 0.447, 0.470], 'std': [0.289, 0.274, 0.278],
# 'dataset': 'pascal_coco_tensorboard_added'},
      'ctdet': {'default_resolution': [512, 512], 'num_classes': 80,
'mean': [0.408, 0.447, 0.470], 'std': [0.289, 0.274, 0.278],
'dataset': 'coco'},
'exdet': {'default_resolution': [512, 512], 'num_classes': 80,
'mean': [0.408, 0.447, 0.470], 'std': [0.289, 0.274, 0.278],
'dataset': 'coco'},
'multi_pose': {
'default_resolution': [512, 512], 'num_classes': 1,
'mean': [0.408, 0.447, 0.470], 'std': [0.289, 0.274, 0.278],
'dataset': 'coco_hp', 'num_joints': 17,
'flip_idx': [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10],
[11, 12], [13, 14], [15, 16]]},
'ddd': {'default_resolution': [384, 1280], 'num_classes': 3,
'mean': [0.485, 0.456, 0.406], 'std': [0.229, 0.224, 0.225],
'dataset': 'kitti'},
'ctdet_drone': {'default_resolution': [512, 512], 'num_classes': 2,
'mean': [0.408, 0.447, 0.470], 'std': [0.289, 0.274, 0.278],
'dataset': 'visdrone'},
}
class Struct:
def __init__(self, entries):
for k, v in entries.items():
self.__setattr__(k, v)
opt = self.parse(args)
dataset = Struct(default_dataset_info[opt.task])
opt.dataset = dataset.dataset
opt = self.update_dataset_info_and_set_heads(opt, dataset)
return opt
| 51.816976
| 125
| 0.55956
|
601dac3a1208b92d411eced1db3178198ae83ee2
| 282,172
|
py
|
Python
|
pandas/core/frame.py
|
maheshbapatu/pandas
|
308e1cbb64869fe5ce2fbd921e7148b4705bbdd1
|
[
"BSD-3-Clause"
] | null | null | null |
pandas/core/frame.py
|
maheshbapatu/pandas
|
308e1cbb64869fe5ce2fbd921e7148b4705bbdd1
|
[
"BSD-3-Clause"
] | null | null | null |
pandas/core/frame.py
|
maheshbapatu/pandas
|
308e1cbb64869fe5ce2fbd921e7148b4705bbdd1
|
[
"BSD-3-Clause"
] | null | null | null |
"""
DataFrame
---------
An efficient 2D container for potentially mixed-type time series or other
labeled data series.
Similar to its R counterpart, data.frame, except providing automatic data
alignment and a host of useful data manipulation methods having to do with the
labeling information
"""
import collections
from collections import abc
from io import StringIO
import itertools
import sys
from textwrap import dedent
from typing import (
FrozenSet,
Hashable,
Iterable,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
Union,
)
import warnings
import numpy as np
import numpy.ma as ma
from pandas._config import get_option
from pandas._libs import algos as libalgos, lib
from pandas.compat.numpy import function as nv
from pandas.util._decorators import Appender, Substitution, rewrite_axis_style_signature
from pandas.util._validators import (
validate_axis_style_args,
validate_bool_kwarg,
validate_percentile,
)
from pandas.core.dtypes.cast import (
cast_scalar_to_array,
coerce_to_dtypes,
find_common_type,
infer_dtype_from_scalar,
invalidate_string_dtypes,
maybe_cast_to_datetime,
maybe_convert_platform,
maybe_downcast_to_dtype,
maybe_infer_to_datetimelike,
maybe_upcast,
maybe_upcast_putmask,
)
from pandas.core.dtypes.common import (
ensure_float64,
ensure_int64,
ensure_platform_int,
infer_dtype_from_object,
is_bool_dtype,
is_dict_like,
is_dtype_equal,
is_extension_array_dtype,
is_float_dtype,
is_hashable,
is_integer,
is_integer_dtype,
is_iterator,
is_list_like,
is_named_tuple,
is_object_dtype,
is_scalar,
is_sequence,
needs_i8_conversion,
)
from pandas.core.dtypes.generic import (
ABCDataFrame,
ABCIndexClass,
ABCMultiIndex,
ABCSeries,
)
from pandas.core.dtypes.missing import isna, notna
from pandas._typing import Axes, Dtype, FilePathOrBuffer
from pandas.core import algorithms, common as com, nanops, ops
from pandas.core.accessor import CachedAccessor
from pandas.core.arrays import Categorical, ExtensionArray
from pandas.core.arrays.datetimelike import DatetimeLikeArrayMixin as DatetimeLikeArray
from pandas.core.arrays.sparse import SparseFrameAccessor
from pandas.core.generic import NDFrame, _shared_docs
from pandas.core.index import Index, ensure_index, ensure_index_from_sequences
from pandas.core.indexes import base as ibase
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.indexes.multi import maybe_droplevels
from pandas.core.indexes.period import PeriodIndex
from pandas.core.indexing import check_bool_indexer, convert_to_index_sliceable
from pandas.core.internals import BlockManager
from pandas.core.internals.construction import (
arrays_to_mgr,
get_names_from_index,
init_dict,
init_ndarray,
masked_rec_array_to_mgr,
reorder_arrays,
sanitize_index,
to_arrays,
)
from pandas.core.ops.missing import dispatch_fill_zeros
from pandas.core.series import Series
from pandas.io.formats import console, format as fmt
from pandas.io.formats.printing import pprint_thing
import pandas.plotting
# ---------------------------------------------------------------------
# Docstring templates
_shared_doc_kwargs = dict(
axes="index, columns",
klass="DataFrame",
axes_single_arg="{0 or 'index', 1 or 'columns'}",
axis="""axis : {0 or 'index', 1 or 'columns'}, default 0
If 0 or 'index': apply function to each column.
If 1 or 'columns': apply function to each row.""",
optional_by="""
by : str or list of str
Name or list of names to sort by.
- if `axis` is 0 or `'index'` then `by` may contain index
levels and/or column labels
- if `axis` is 1 or `'columns'` then `by` may contain column
levels and/or index labels
.. versionchanged:: 0.23.0
Allow specifying index or column level names.""",
versionadded_to_excel="",
optional_labels="""labels : array-like, optional
New labels / index to conform the axis specified by 'axis' to.""",
optional_axis="""axis : int or str, optional
Axis to target. Can be either the axis name ('index', 'columns')
or number (0, 1).""",
)
_numeric_only_doc = """numeric_only : boolean, default None
Include only float, int, boolean data. If None, will attempt to use
everything, then use only numeric data
"""
_merge_doc = """
Merge DataFrame or named Series objects with a database-style join.
The join is done on columns or indexes. If joining columns on
columns, the DataFrame indexes *will be ignored*. Otherwise if joining indexes
on indexes or indexes on a column or columns, the index will be passed on.
Parameters
----------%s
right : DataFrame or named Series
Object to merge with.
how : {'left', 'right', 'outer', 'inner'}, default 'inner'
Type of merge to be performed.
* left: use only keys from left frame, similar to a SQL left outer join;
preserve key order.
* right: use only keys from right frame, similar to a SQL right outer join;
preserve key order.
* outer: use union of keys from both frames, similar to a SQL full outer
join; sort keys lexicographically.
* inner: use intersection of keys from both frames, similar to a SQL inner
join; preserve the order of the left keys.
on : label or list
Column or index level names to join on. These must be found in both
DataFrames. If `on` is None and not merging on indexes then this defaults
to the intersection of the columns in both DataFrames.
left_on : label or list, or array-like
Column or index level names to join on in the left DataFrame. Can also
be an array or list of arrays of the length of the left DataFrame.
These arrays are treated as if they are columns.
right_on : label or list, or array-like
Column or index level names to join on in the right DataFrame. Can also
be an array or list of arrays of the length of the right DataFrame.
These arrays are treated as if they are columns.
left_index : bool, default False
Use the index from the left DataFrame as the join key(s). If it is a
MultiIndex, the number of keys in the other DataFrame (either the index
or a number of columns) must match the number of levels.
right_index : bool, default False
Use the index from the right DataFrame as the join key. Same caveats as
left_index.
sort : bool, default False
Sort the join keys lexicographically in the result DataFrame. If False,
the order of the join keys depends on the join type (how keyword).
suffixes : tuple of (str, str), default ('_x', '_y')
Suffix to apply to overlapping column names in the left and right
side, respectively. To raise an exception on overlapping columns use
(False, False).
copy : bool, default True
If False, avoid copy if possible.
indicator : bool or str, default False
If True, adds a column to output DataFrame called "_merge" with
information on the source of each row.
If string, column with information on source of each row will be added to
output DataFrame, and column will be named value of string.
Information column is Categorical-type and takes on a value of "left_only"
for observations whose merge key only appears in 'left' DataFrame,
"right_only" for observations whose merge key only appears in 'right'
DataFrame, and "both" if the observation's merge key is found in both.
validate : str, optional
If specified, checks if merge is of specified type.
* "one_to_one" or "1:1": check if merge keys are unique in both
left and right datasets.
* "one_to_many" or "1:m": check if merge keys are unique in left
dataset.
* "many_to_one" or "m:1": check if merge keys are unique in right
dataset.
* "many_to_many" or "m:m": allowed, but does not result in checks.
.. versionadded:: 0.21.0
Returns
-------
DataFrame
A DataFrame of the two merged objects.
See Also
--------
merge_ordered : Merge with optional filling/interpolation.
merge_asof : Merge on nearest keys.
DataFrame.join : Similar method using indices.
Notes
-----
Support for specifying index levels as the `on`, `left_on`, and
`right_on` parameters was added in version 0.23.0
Support for merging named Series objects was added in version 0.24.0
Examples
--------
>>> df1 = pd.DataFrame({'lkey': ['foo', 'bar', 'baz', 'foo'],
... 'value': [1, 2, 3, 5]})
>>> df2 = pd.DataFrame({'rkey': ['foo', 'bar', 'baz', 'foo'],
... 'value': [5, 6, 7, 8]})
>>> df1
lkey value
0 foo 1
1 bar 2
2 baz 3
3 foo 5
>>> df2
rkey value
0 foo 5
1 bar 6
2 baz 7
3 foo 8
Merge df1 and df2 on the lkey and rkey columns. The value columns have
the default suffixes, _x and _y, appended.
>>> df1.merge(df2, left_on='lkey', right_on='rkey')
lkey value_x rkey value_y
0 foo 1 foo 5
1 foo 1 foo 8
2 foo 5 foo 5
3 foo 5 foo 8
4 bar 2 bar 6
5 baz 3 baz 7
Merge DataFrames df1 and df2 with specified left and right suffixes
appended to any overlapping columns.
>>> df1.merge(df2, left_on='lkey', right_on='rkey',
... suffixes=('_left', '_right'))
lkey value_left rkey value_right
0 foo 1 foo 5
1 foo 1 foo 8
2 foo 5 foo 5
3 foo 5 foo 8
4 bar 2 bar 6
5 baz 3 baz 7
Merge DataFrames df1 and df2, but raise an exception if the DataFrames have
any overlapping columns.
>>> df1.merge(df2, left_on='lkey', right_on='rkey', suffixes=(False, False))
Traceback (most recent call last):
...
ValueError: columns overlap but no suffix specified:
Index(['value'], dtype='object')
"""
# -----------------------------------------------------------------------
# DataFrame class
class DataFrame(NDFrame):
"""
Two-dimensional, size-mutable, potentially heterogeneous tabular data.
Data structure also contains labeled axes (rows and columns).
Arithmetic operations align on both row and column labels. Can be
thought of as a dict-like container for Series objects. The primary
pandas data structure.
Parameters
----------
data : ndarray (structured or homogeneous), Iterable, dict, or DataFrame
Dict can contain Series, arrays, constants, or list-like objects.
.. versionchanged:: 0.23.0
If data is a dict, column order follows insertion-order for
Python 3.6 and later.
.. versionchanged:: 0.25.0
If data is a list of dicts, column order follows insertion-order
for Python 3.6 and later.
index : Index or array-like
Index to use for resulting frame. Will default to RangeIndex if
no indexing information part of input data and no index provided.
columns : Index or array-like
Column labels to use for resulting frame. Will default to
RangeIndex (0, 1, 2, ..., n) if no column labels are provided.
dtype : dtype, default None
Data type to force. Only a single dtype is allowed. If None, infer.
copy : bool, default False
Copy data from inputs. Only affects DataFrame / 2d ndarray input.
See Also
--------
DataFrame.from_records : Constructor from tuples, also record arrays.
DataFrame.from_dict : From dicts of Series, arrays, or dicts.
read_csv
read_table
read_clipboard
Examples
--------
Constructing DataFrame from a dictionary.
>>> d = {'col1': [1, 2], 'col2': [3, 4]}
>>> df = pd.DataFrame(data=d)
>>> df
col1 col2
0 1 3
1 2 4
Notice that the inferred dtype is int64.
>>> df.dtypes
col1 int64
col2 int64
dtype: object
To enforce a single dtype:
>>> df = pd.DataFrame(data=d, dtype=np.int8)
>>> df.dtypes
col1 int8
col2 int8
dtype: object
Constructing DataFrame from numpy ndarray:
>>> df2 = pd.DataFrame(np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]),
... columns=['a', 'b', 'c'])
>>> df2
a b c
0 1 2 3
1 4 5 6
2 7 8 9
"""
_typ = "dataframe"
@property
def _constructor(self) -> Type["DataFrame"]:
return DataFrame
_constructor_sliced: Type[Series] = Series
_deprecations: FrozenSet[str] = NDFrame._deprecations | frozenset([])
_accessors: Set[str] = set()
@property
def _constructor_expanddim(self):
raise NotImplementedError("Not supported for DataFrames!")
# ----------------------------------------------------------------------
# Constructors
def __init__(
self,
data=None,
index: Optional[Axes] = None,
columns: Optional[Axes] = None,
dtype: Optional[Dtype] = None,
copy: bool = False,
):
if data is None:
data = {}
if dtype is not None:
dtype = self._validate_dtype(dtype)
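        # construction dispatches on the type of `data`; a DataFrame input is
        # first unwrapped to its underlying BlockManager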
if isinstance(data, DataFrame):
data = data._data
if isinstance(data, BlockManager):
mgr = self._init_mgr(
data, axes=dict(index=index, columns=columns), dtype=dtype, copy=copy
)
elif isinstance(data, dict):
mgr = init_dict(data, index, columns, dtype=dtype)
elif isinstance(data, ma.MaskedArray):
import numpy.ma.mrecords as mrecords
# masked recarray
if isinstance(data, mrecords.MaskedRecords):
mgr = masked_rec_array_to_mgr(data, index, columns, dtype, copy)
# a masked array
else:
mask = ma.getmaskarray(data)
if mask.any():
data, fill_value = maybe_upcast(data, copy=True)
data.soften_mask() # set hardmask False if it was True
data[mask] = fill_value
else:
data = data.copy()
mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy)
elif isinstance(data, (np.ndarray, Series, Index)):
if data.dtype.names:
data_columns = list(data.dtype.names)
data = {k: data[k] for k in data_columns}
if columns is None:
columns = data_columns
mgr = init_dict(data, index, columns, dtype=dtype)
elif getattr(data, "name", None) is not None:
mgr = init_dict({data.name: data}, index, columns, dtype=dtype)
else:
mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy)
# For data is list-like, or Iterable (will consume into list)
elif isinstance(data, abc.Iterable) and not isinstance(data, (str, bytes)):
if not isinstance(data, abc.Sequence):
data = list(data)
if len(data) > 0:
if is_list_like(data[0]) and getattr(data[0], "ndim", 1) == 1:
if is_named_tuple(data[0]) and columns is None:
columns = data[0]._fields
arrays, columns = to_arrays(data, columns, dtype=dtype)
columns = ensure_index(columns)
# set the index
if index is None:
if isinstance(data[0], Series):
index = get_names_from_index(data)
elif isinstance(data[0], Categorical):
index = ibase.default_index(len(data[0]))
else:
index = ibase.default_index(len(data))
mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype)
else:
mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy)
else:
mgr = init_dict({}, index, columns, dtype=dtype)
else:
try:
arr = np.array(data, dtype=dtype, copy=copy)
except (ValueError, TypeError) as e:
exc = TypeError(
"DataFrame constructor called with "
"incompatible data and dtype: {e}".format(e=e)
)
raise exc from e
if arr.ndim == 0 and index is not None and columns is not None:
values = cast_scalar_to_array(
(len(index), len(columns)), data, dtype=dtype
)
mgr = init_ndarray(
values, index, columns, dtype=values.dtype, copy=False
)
else:
raise ValueError("DataFrame constructor not properly called!")
NDFrame.__init__(self, mgr, fastpath=True)
# ----------------------------------------------------------------------
@property
def axes(self) -> List[Index]:
"""
Return a list representing the axes of the DataFrame.
It has the row axis labels and column axis labels as the only members.
They are returned in that order.
Examples
--------
>>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
>>> df.axes
[RangeIndex(start=0, stop=2, step=1), Index(['col1', 'col2'],
dtype='object')]
"""
return [self.index, self.columns]
@property
def shape(self) -> Tuple[int, int]:
"""
Return a tuple representing the dimensionality of the DataFrame.
See Also
--------
ndarray.shape
Examples
--------
>>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
>>> df.shape
(2, 2)
>>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4],
... 'col3': [5, 6]})
>>> df.shape
(2, 3)
"""
return len(self.index), len(self.columns)
@property
def _is_homogeneous_type(self) -> bool:
"""
Whether all the columns in a DataFrame have the same type.
Returns
-------
bool
See Also
--------
Index._is_homogeneous_type : Whether the object has a single
dtype.
MultiIndex._is_homogeneous_type : Whether all the levels of a
MultiIndex have the same dtype.
Examples
--------
>>> DataFrame({"A": [1, 2], "B": [3, 4]})._is_homogeneous_type
True
>>> DataFrame({"A": [1, 2], "B": [3.0, 4.0]})._is_homogeneous_type
False
Items with the same type but different sizes are considered
different types.
>>> DataFrame({
... "A": np.array([1, 2], dtype=np.int32),
... "B": np.array([1, 2], dtype=np.int64)})._is_homogeneous_type
False
"""
if self._data.any_extension_types:
return len({block.dtype for block in self._data.blocks}) == 1
else:
return not self._data.is_mixed_type
# ----------------------------------------------------------------------
# Rendering Methods
def _repr_fits_vertical_(self) -> bool:
"""
Check length against max_rows.
"""
max_rows = get_option("display.max_rows")
return len(self) <= max_rows
def _repr_fits_horizontal_(self, ignore_width: bool = False) -> bool:
"""
Check if full repr fits in horizontal boundaries imposed by the display
options width and max_columns.
        In case of a non-interactive session, no boundaries apply.
        `ignore_width` is here so ipynb+HTML output can behave the way
users expect. display.max_columns remains in effect.
GH3541, GH3573
"""
width, height = console.get_console_size()
max_columns = get_option("display.max_columns")
nb_columns = len(self.columns)
# exceed max columns
if (max_columns and nb_columns > max_columns) or (
(not ignore_width) and width and nb_columns > (width // 2)
):
return False
# used by repr_html under IPython notebook or scripts ignore terminal
# dims
if ignore_width or not console.in_interactive_session():
return True
if get_option("display.width") is not None or console.in_ipython_frontend():
# check at least the column row for excessive width
max_rows = 1
else:
max_rows = get_option("display.max_rows")
# when auto-detecting, so width=None and not in ipython front end
# check whether repr fits horizontal by actually checking
# the width of the rendered repr
buf = StringIO()
# only care about the stuff we'll actually print out
# and to_string on entire frame may be expensive
d = self
        if max_rows is not None:  # max_rows is set, so truncate before rendering
# min of two, where one may be None
d = d.iloc[: min(max_rows, len(d))]
else:
return True
d.to_string(buf=buf)
value = buf.getvalue()
repr_width = max(len(l) for l in value.split("\n"))
return repr_width < width
def _info_repr(self) -> bool:
"""
True if the repr should show the info view.
"""
info_repr_option = get_option("display.large_repr") == "info"
return info_repr_option and not (
self._repr_fits_horizontal_() and self._repr_fits_vertical_()
)
def __repr__(self) -> str:
"""
Return a string representation for a particular DataFrame.
"""
buf = StringIO("")
if self._info_repr():
self.info(buf=buf)
return buf.getvalue()
max_rows = get_option("display.max_rows")
min_rows = get_option("display.min_rows")
max_cols = get_option("display.max_columns")
max_colwidth = get_option("display.max_colwidth")
show_dimensions = get_option("display.show_dimensions")
if get_option("display.expand_frame_repr"):
width, _ = console.get_console_size()
else:
width = None
self.to_string(
buf=buf,
max_rows=max_rows,
min_rows=min_rows,
max_cols=max_cols,
line_width=width,
max_colwidth=max_colwidth,
show_dimensions=show_dimensions,
)
return buf.getvalue()
def _repr_html_(self) -> Optional[str]:
"""
Return a html representation for a particular DataFrame.
Mainly for IPython notebook.
"""
if self._info_repr():
buf = StringIO("")
self.info(buf=buf)
# need to escape the <class>, should be the first line.
            val = buf.getvalue().replace("<", r"&lt;", 1)
            val = val.replace(">", r"&gt;", 1)
return "<pre>" + val + "</pre>"
if get_option("display.notebook_repr_html"):
max_rows = get_option("display.max_rows")
min_rows = get_option("display.min_rows")
max_cols = get_option("display.max_columns")
show_dimensions = get_option("display.show_dimensions")
formatter = fmt.DataFrameFormatter(
self,
columns=None,
col_space=None,
na_rep="NaN",
formatters=None,
float_format=None,
sparsify=None,
justify=None,
index_names=True,
header=True,
index=True,
bold_rows=True,
escape=True,
max_rows=max_rows,
min_rows=min_rows,
max_cols=max_cols,
show_dimensions=show_dimensions,
decimal=".",
table_id=None,
render_links=False,
)
return formatter.to_html(notebook=True)
else:
return None
@Substitution(
header_type="bool or sequence",
header="Write out the column names. If a list of strings "
"is given, it is assumed to be aliases for the "
"column names",
col_space_type="int",
col_space="The minimum width of each column",
)
@Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring)
def to_string(
self,
buf: Optional[FilePathOrBuffer[str]] = None,
columns: Optional[Sequence[str]] = None,
col_space: Optional[int] = None,
header: Union[bool, Sequence[str]] = True,
index: bool = True,
na_rep: str = "NaN",
formatters: Optional[fmt.formatters_type] = None,
float_format: Optional[fmt.float_format_type] = None,
sparsify: Optional[bool] = None,
index_names: bool = True,
justify: Optional[str] = None,
max_rows: Optional[int] = None,
min_rows: Optional[int] = None,
max_cols: Optional[int] = None,
show_dimensions: bool = False,
decimal: str = ".",
line_width: Optional[int] = None,
max_colwidth: Optional[int] = None,
encoding: Optional[str] = None,
) -> Optional[str]:
"""
Render a DataFrame to a console-friendly tabular output.
%(shared_params)s
line_width : int, optional
Width to wrap a line in characters.
max_colwidth : int, optional
Max width to truncate each column in characters. By default, no limit.
.. versionadded:: 1.0.0
encoding : str, default "utf-8"
Set character encoding.
.. versionadded:: 1.0
%(returns)s
See Also
--------
to_html : Convert DataFrame to HTML.
Examples
--------
>>> d = {'col1': [1, 2, 3], 'col2': [4, 5, 6]}
>>> df = pd.DataFrame(d)
>>> print(df.to_string())
col1 col2
0 1 4
1 2 5
2 3 6
"""
from pandas import option_context
with option_context("display.max_colwidth", max_colwidth):
formatter = fmt.DataFrameFormatter(
self,
columns=columns,
col_space=col_space,
na_rep=na_rep,
formatters=formatters,
float_format=float_format,
sparsify=sparsify,
justify=justify,
index_names=index_names,
header=header,
index=index,
min_rows=min_rows,
max_rows=max_rows,
max_cols=max_cols,
show_dimensions=show_dimensions,
decimal=decimal,
line_width=line_width,
)
return formatter.to_string(buf=buf, encoding=encoding)
# ----------------------------------------------------------------------
@property
def style(self):
"""
Returns a Styler object.
        Contains methods for building a styled HTML representation of the DataFrame.
See Also
--------
io.formats.style.Styler
"""
from pandas.io.formats.style import Styler
return Styler(self)
_shared_docs[
"items"
] = r"""
Iterate over (column name, Series) pairs.
Iterates over the DataFrame columns, returning a tuple with
the column name and the content as a Series.
Yields
------
label : object
The column names for the DataFrame being iterated over.
content : Series
The column entries belonging to each label, as a Series.
See Also
--------
DataFrame.iterrows : Iterate over DataFrame rows as
(index, Series) pairs.
DataFrame.itertuples : Iterate over DataFrame rows as namedtuples
of the values.
Examples
--------
>>> df = pd.DataFrame({'species': ['bear', 'bear', 'marsupial'],
... 'population': [1864, 22000, 80000]},
... index=['panda', 'polar', 'koala'])
>>> df
species population
panda bear 1864
polar bear 22000
koala marsupial 80000
>>> for label, content in df.items():
... print('label:', label)
... print('content:', content, sep='\n')
...
label: species
content:
panda bear
polar bear
koala marsupial
Name: species, dtype: object
label: population
content:
panda 1864
polar 22000
koala 80000
Name: population, dtype: int64
"""
@Appender(_shared_docs["items"])
def items(self) -> Iterable[Tuple[Optional[Hashable], Series]]:
if self.columns.is_unique and hasattr(self, "_item_cache"):
for k in self.columns:
yield k, self._get_item_cache(k)
else:
for i, k in enumerate(self.columns):
yield k, self._ixs(i, axis=1)
@Appender(_shared_docs["items"])
def iteritems(self):
yield from self.items()
def iterrows(self):
"""
Iterate over DataFrame rows as (index, Series) pairs.
Yields
------
index : label or tuple of label
The index of the row. A tuple for a `MultiIndex`.
data : Series
The data of the row as a Series.
it : generator
A generator that iterates over the rows of the frame.
See Also
--------
DataFrame.itertuples : Iterate over DataFrame rows as namedtuples of the values.
DataFrame.items : Iterate over (column name, Series) pairs.
Notes
-----
1. Because ``iterrows`` returns a Series for each row,
it does **not** preserve dtypes across the rows (dtypes are
preserved across columns for DataFrames). For example,
>>> df = pd.DataFrame([[1, 1.5]], columns=['int', 'float'])
>>> row = next(df.iterrows())[1]
>>> row
int 1.0
float 1.5
Name: 0, dtype: float64
>>> print(row['int'].dtype)
float64
>>> print(df['int'].dtype)
int64
To preserve dtypes while iterating over the rows, it is better
to use :meth:`itertuples` which returns namedtuples of the values
and which is generally faster than ``iterrows``.
2. You should **never modify** something you are iterating over.
This is not guaranteed to work in all cases. Depending on the
data types, the iterator returns a copy and not a view, and writing
to it will have no effect.
"""
columns = self.columns
klass = self._constructor_sliced
for k, v in zip(self.index, self.values):
s = klass(v, index=columns, name=k)
yield k, s
def itertuples(self, index=True, name="Pandas"):
"""
Iterate over DataFrame rows as namedtuples.
Parameters
----------
index : bool, default True
If True, return the index as the first element of the tuple.
name : str or None, default "Pandas"
The name of the returned namedtuples or None to return regular
tuples.
Returns
-------
iterator
An object to iterate over namedtuples for each row in the
DataFrame with the first field possibly being the index and
following fields being the column values.
See Also
--------
DataFrame.iterrows : Iterate over DataFrame rows as (index, Series)
pairs.
DataFrame.items : Iterate over (column name, Series) pairs.
Notes
-----
The column names will be renamed to positional names if they are
invalid Python identifiers, repeated, or start with an underscore.
With a large number of columns (>255), regular tuples are returned.
Examples
--------
>>> df = pd.DataFrame({'num_legs': [4, 2], 'num_wings': [0, 2]},
... index=['dog', 'hawk'])
>>> df
num_legs num_wings
dog 4 0
hawk 2 2
>>> for row in df.itertuples():
... print(row)
...
Pandas(Index='dog', num_legs=4, num_wings=0)
Pandas(Index='hawk', num_legs=2, num_wings=2)
By setting the `index` parameter to False we can remove the index
as the first element of the tuple:
>>> for row in df.itertuples(index=False):
... print(row)
...
Pandas(num_legs=4, num_wings=0)
Pandas(num_legs=2, num_wings=2)
With the `name` parameter set we set a custom name for the yielded
namedtuples:
>>> for row in df.itertuples(name='Animal'):
... print(row)
...
Animal(Index='dog', num_legs=4, num_wings=0)
Animal(Index='hawk', num_legs=2, num_wings=2)
"""
arrays = []
fields = list(self.columns)
if index:
arrays.append(self.index)
fields.insert(0, "Index")
# use integer indexing because of possible duplicate column names
arrays.extend(self.iloc[:, k] for k in range(len(self.columns)))
# Python 3 supports at most 255 arguments to constructor
if name is not None and len(self.columns) + index < 256:
itertuple = collections.namedtuple(name, fields, rename=True)
return map(itertuple._make, zip(*arrays))
# fallback to regular tuples
return zip(*arrays)
def __len__(self) -> int:
"""
Returns length of info axis, but here we use the index.
"""
return len(self.index)
def dot(self, other):
"""
Compute the matrix multiplication between the DataFrame and other.
This method computes the matrix product between the DataFrame and the
        values of another Series, DataFrame or a numpy array.
It can also be called using ``self @ other`` in Python >= 3.5.
Parameters
----------
other : Series, DataFrame or array-like
The other object to compute the matrix product with.
Returns
-------
Series or DataFrame
If other is a Series, return the matrix product between self and
            other as a Series. If other is a DataFrame or a numpy.array, return
            the matrix product of self and other as a DataFrame or an np.array.
See Also
--------
Series.dot: Similar method for Series.
Notes
-----
The dimensions of DataFrame and other must be compatible in order to
compute the matrix multiplication. In addition, the column names of
DataFrame and the index of other must contain the same values, as they
will be aligned prior to the multiplication.
The dot method for Series computes the inner product, instead of the
matrix product here.
Examples
--------
Here we multiply a DataFrame with a Series.
>>> df = pd.DataFrame([[0, 1, -2, -1], [1, 1, 1, 1]])
>>> s = pd.Series([1, 1, 2, 1])
>>> df.dot(s)
0 -4
1 5
dtype: int64
Here we multiply a DataFrame with another DataFrame.
>>> other = pd.DataFrame([[0, 1], [1, 2], [-1, -1], [2, 0]])
>>> df.dot(other)
0 1
0 1 4
1 2 2
        Note that the dot method gives the same result as @
>>> df @ other
0 1
0 1 4
1 2 2
The dot method works also if other is an np.array.
>>> arr = np.array([[0, 1], [1, 2], [-1, -1], [2, 0]])
>>> df.dot(arr)
0 1
0 1 4
1 2 2
Note how shuffling of the objects does not change the result.
>>> s2 = s.reindex([1, 0, 2, 3])
>>> df.dot(s2)
0 -4
1 5
dtype: int64
"""
if isinstance(other, (Series, DataFrame)):
common = self.columns.union(other.index)
if len(common) > len(self.columns) or len(common) > len(other.index):
raise ValueError("matrices are not aligned")
left = self.reindex(columns=common, copy=False)
right = other.reindex(index=common, copy=False)
lvals = left.values
rvals = right.values
else:
left = self
lvals = self.values
rvals = np.asarray(other)
if lvals.shape[1] != rvals.shape[0]:
raise ValueError(
"Dot product shape mismatch, "
"{s} vs {r}".format(s=lvals.shape, r=rvals.shape)
)
if isinstance(other, DataFrame):
return self._constructor(
np.dot(lvals, rvals), index=left.index, columns=other.columns
)
elif isinstance(other, Series):
return Series(np.dot(lvals, rvals), index=left.index)
elif isinstance(rvals, (np.ndarray, Index)):
result = np.dot(lvals, rvals)
if result.ndim == 2:
return self._constructor(result, index=left.index)
else:
return Series(result, index=left.index)
else: # pragma: no cover
raise TypeError("unsupported type: {oth}".format(oth=type(other)))
def __matmul__(self, other):
"""
Matrix multiplication using binary `@` operator in Python>=3.5.
"""
return self.dot(other)
def __rmatmul__(self, other):
"""
Matrix multiplication using binary `@` operator in Python>=3.5.
"""
return self.T.dot(np.transpose(other)).T
# ----------------------------------------------------------------------
# IO methods (to / from other formats)
@classmethod
def from_dict(cls, data, orient="columns", dtype=None, columns=None):
"""
Construct DataFrame from dict of array-like or dicts.
Creates DataFrame object from dictionary by columns or by index
allowing dtype specification.
Parameters
----------
data : dict
Of the form {field : array-like} or {field : dict}.
orient : {'columns', 'index'}, default 'columns'
The "orientation" of the data. If the keys of the passed dict
should be the columns of the resulting DataFrame, pass 'columns'
(default). Otherwise if the keys should be rows, pass 'index'.
dtype : dtype, default None
Data type to force, otherwise infer.
columns : list, default None
Column labels to use when ``orient='index'``. Raises a ValueError
if used with ``orient='columns'``.
.. versionadded:: 0.23.0
Returns
-------
DataFrame
See Also
--------
DataFrame.from_records : DataFrame from ndarray (structured
dtype), list of tuples, dict, or DataFrame.
DataFrame : DataFrame object creation using constructor.
Examples
--------
By default the keys of the dict become the DataFrame columns:
>>> data = {'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']}
>>> pd.DataFrame.from_dict(data)
col_1 col_2
0 3 a
1 2 b
2 1 c
3 0 d
Specify ``orient='index'`` to create the DataFrame using dictionary
keys as rows:
>>> data = {'row_1': [3, 2, 1, 0], 'row_2': ['a', 'b', 'c', 'd']}
>>> pd.DataFrame.from_dict(data, orient='index')
0 1 2 3
row_1 3 2 1 0
row_2 a b c d
When using the 'index' orientation, the column names can be
specified manually:
>>> pd.DataFrame.from_dict(data, orient='index',
... columns=['A', 'B', 'C', 'D'])
A B C D
row_1 3 2 1 0
row_2 a b c d
"""
index = None
orient = orient.lower()
if orient == "index":
if len(data) > 0:
# TODO speed up Series case
if isinstance(list(data.values())[0], (Series, dict)):
data = _from_nested_dict(data)
else:
data, index = list(data.values()), list(data.keys())
elif orient == "columns":
if columns is not None:
raise ValueError("cannot use columns parameter with orient='columns'")
else: # pragma: no cover
raise ValueError("only recognize index or columns for orient")
return cls(data, index=index, columns=columns, dtype=dtype)
def to_numpy(self, dtype=None, copy=False):
"""
Convert the DataFrame to a NumPy array.
.. versionadded:: 0.24.0
By default, the dtype of the returned array will be the common NumPy
dtype of all types in the DataFrame. For example, if the dtypes are
``float16`` and ``float32``, the results dtype will be ``float32``.
This may require copying data and coercing values, which may be
expensive.
Parameters
----------
dtype : str or numpy.dtype, optional
The dtype to pass to :meth:`numpy.asarray`.
copy : bool, default False
Whether to ensure that the returned value is a not a view on
another array. Note that ``copy=False`` does not *ensure* that
            ``to_numpy()`` is no-copy. Rather, ``copy=True`` ensures that
a copy is made, even if not strictly necessary.
Returns
-------
numpy.ndarray
See Also
--------
Series.to_numpy : Similar method for Series.
Examples
--------
>>> pd.DataFrame({"A": [1, 2], "B": [3, 4]}).to_numpy()
array([[1, 3],
[2, 4]])
With heterogeneous data, the lowest common type will have to
be used.
>>> df = pd.DataFrame({"A": [1, 2], "B": [3.0, 4.5]})
>>> df.to_numpy()
array([[1. , 3. ],
[2. , 4.5]])
For a mix of numeric and non-numeric types, the output array will
have object dtype.
>>> df['C'] = pd.date_range('2000', periods=2)
>>> df.to_numpy()
array([[1, 3.0, Timestamp('2000-01-01 00:00:00')],
[2, 4.5, Timestamp('2000-01-02 00:00:00')]], dtype=object)
"""
result = np.array(self.values, dtype=dtype, copy=copy)
return result
def to_dict(self, orient="dict", into=dict):
"""
Convert the DataFrame to a dictionary.
The type of the key-value pairs can be customized with the parameters
(see below).
Parameters
----------
orient : str {'dict', 'list', 'series', 'split', 'records', 'index'}
Determines the type of the values of the dictionary.
- 'dict' (default) : dict like {column -> {index -> value}}
- 'list' : dict like {column -> [values]}
- 'series' : dict like {column -> Series(values)}
- 'split' : dict like
{'index' -> [index], 'columns' -> [columns], 'data' -> [values]}
- 'records' : list like
[{column -> value}, ... , {column -> value}]
- 'index' : dict like {index -> {column -> value}}
Abbreviations are allowed. `s` indicates `series` and `sp`
indicates `split`.
into : class, default dict
The collections.abc.Mapping subclass used for all Mappings
in the return value. Can be the actual class or an empty
instance of the mapping type you want. If you want a
collections.defaultdict, you must pass it initialized.
.. versionadded:: 0.21.0
Returns
-------
dict, list or collections.abc.Mapping
Return a collections.abc.Mapping object representing the DataFrame.
The resulting transformation depends on the `orient` parameter.
See Also
--------
DataFrame.from_dict: Create a DataFrame from a dictionary.
DataFrame.to_json: Convert a DataFrame to JSON format.
Examples
--------
>>> df = pd.DataFrame({'col1': [1, 2],
... 'col2': [0.5, 0.75]},
... index=['row1', 'row2'])
>>> df
col1 col2
row1 1 0.50
row2 2 0.75
>>> df.to_dict()
{'col1': {'row1': 1, 'row2': 2}, 'col2': {'row1': 0.5, 'row2': 0.75}}
You can specify the return orientation.
>>> df.to_dict('series')
{'col1': row1 1
row2 2
Name: col1, dtype: int64,
'col2': row1 0.50
row2 0.75
Name: col2, dtype: float64}
>>> df.to_dict('split')
{'index': ['row1', 'row2'], 'columns': ['col1', 'col2'],
'data': [[1, 0.5], [2, 0.75]]}
>>> df.to_dict('records')
[{'col1': 1, 'col2': 0.5}, {'col1': 2, 'col2': 0.75}]
>>> df.to_dict('index')
{'row1': {'col1': 1, 'col2': 0.5}, 'row2': {'col1': 2, 'col2': 0.75}}
You can also specify the mapping type.
>>> from collections import OrderedDict, defaultdict
>>> df.to_dict(into=OrderedDict)
OrderedDict([('col1', OrderedDict([('row1', 1), ('row2', 2)])),
('col2', OrderedDict([('row1', 0.5), ('row2', 0.75)]))])
If you want a `defaultdict`, you need to initialize it:
>>> dd = defaultdict(list)
>>> df.to_dict('records', into=dd)
[defaultdict(<class 'list'>, {'col1': 1, 'col2': 0.5}),
defaultdict(<class 'list'>, {'col1': 2, 'col2': 0.75})]
"""
if not self.columns.is_unique:
warnings.warn(
"DataFrame columns are not unique, some columns will be omitted.",
UserWarning,
stacklevel=2,
)
# GH16122
into_c = com.standardize_mapping(into)
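        # `orient` is matched by lowercase prefix below, so abbreviations such
        # as 'l' (list) or 'sp' (split) are accepted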
if orient.lower().startswith("d"):
return into_c((k, v.to_dict(into)) for k, v in self.items())
elif orient.lower().startswith("l"):
return into_c((k, v.tolist()) for k, v in self.items())
elif orient.lower().startswith("sp"):
return into_c(
(
("index", self.index.tolist()),
("columns", self.columns.tolist()),
(
"data",
[
list(map(com.maybe_box_datetimelike, t))
for t in self.itertuples(index=False, name=None)
],
),
)
)
elif orient.lower().startswith("s"):
return into_c((k, com.maybe_box_datetimelike(v)) for k, v in self.items())
elif orient.lower().startswith("r"):
columns = self.columns.tolist()
rows = (
dict(zip(columns, row))
for row in self.itertuples(index=False, name=None)
)
return [
into_c((k, com.maybe_box_datetimelike(v)) for k, v in row.items())
for row in rows
]
elif orient.lower().startswith("i"):
if not self.index.is_unique:
raise ValueError("DataFrame index must be unique for orient='index'.")
return into_c(
(t[0], dict(zip(self.columns, t[1:])))
for t in self.itertuples(name=None)
)
else:
raise ValueError("orient '{o}' not understood".format(o=orient))
def to_gbq(
self,
destination_table,
project_id=None,
chunksize=None,
reauth=False,
if_exists="fail",
auth_local_webserver=False,
table_schema=None,
location=None,
progress_bar=True,
credentials=None,
verbose=None,
private_key=None,
):
"""
Write a DataFrame to a Google BigQuery table.
This function requires the `pandas-gbq package
<https://pandas-gbq.readthedocs.io>`__.
See the `How to authenticate with Google BigQuery
<https://pandas-gbq.readthedocs.io/en/latest/howto/authentication.html>`__
guide for authentication instructions.
Parameters
----------
destination_table : str
Name of table to be written, in the form ``dataset.tablename``.
project_id : str, optional
Google BigQuery Account project ID. Optional when available from
the environment.
chunksize : int, optional
Number of rows to be inserted in each chunk from the dataframe.
Set to ``None`` to load the whole dataframe at once.
reauth : bool, default False
Force Google BigQuery to re-authenticate the user. This is useful
if multiple accounts are used.
if_exists : str, default 'fail'
Behavior when the destination table exists. Value can be one of:
``'fail'``
If table exists, do nothing.
``'replace'``
If table exists, drop it, recreate it, and insert data.
``'append'``
If table exists, insert data. Create if does not exist.
auth_local_webserver : bool, default False
Use the `local webserver flow`_ instead of the `console flow`_
when getting user credentials.
.. _local webserver flow:
http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server
.. _console flow:
http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console
*New in version 0.2.0 of pandas-gbq*.
table_schema : list of dicts, optional
            List of BigQuery table fields to which the DataFrame
            columns conform, e.g. ``[{'name': 'col1', 'type':
'STRING'},...]``. If schema is not provided, it will be
generated according to dtypes of DataFrame columns. See
BigQuery API documentation on available names of a field.
*New in version 0.3.1 of pandas-gbq*.
location : str, optional
Location where the load job should run. See the `BigQuery locations
documentation
<https://cloud.google.com/bigquery/docs/dataset-locations>`__ for a
list of available locations. The location must match that of the
target dataset.
*New in version 0.5.0 of pandas-gbq*.
progress_bar : bool, default True
Use the library `tqdm` to show the progress bar for the upload,
chunk by chunk.
*New in version 0.5.0 of pandas-gbq*.
credentials : google.auth.credentials.Credentials, optional
Credentials for accessing Google APIs. Use this parameter to
override default credentials, such as to use Compute Engine
:class:`google.auth.compute_engine.Credentials` or Service
Account :class:`google.oauth2.service_account.Credentials`
directly.
*New in version 0.8.0 of pandas-gbq*.
.. versionadded:: 0.24.0
verbose : bool, deprecated
Deprecated in pandas-gbq version 0.4.0. Use the `logging module
to adjust verbosity instead
<https://pandas-gbq.readthedocs.io/en/latest/intro.html#logging>`__.
private_key : str, deprecated
Deprecated in pandas-gbq version 0.8.0. Use the ``credentials``
parameter and
:func:`google.oauth2.service_account.Credentials.from_service_account_info`
or
:func:`google.oauth2.service_account.Credentials.from_service_account_file`
instead.
Service account private key in JSON format. Can be file path
or string contents. This is useful for remote server
authentication (eg. Jupyter/IPython notebook on remote host).
See Also
--------
pandas_gbq.to_gbq : This function in the pandas-gbq library.
read_gbq : Read a DataFrame from Google BigQuery.
"""
from pandas.io import gbq
gbq.to_gbq(
self,
destination_table,
project_id=project_id,
chunksize=chunksize,
reauth=reauth,
if_exists=if_exists,
auth_local_webserver=auth_local_webserver,
table_schema=table_schema,
location=location,
progress_bar=progress_bar,
credentials=credentials,
verbose=verbose,
private_key=private_key,
)
@classmethod
def from_records(
cls,
data,
index=None,
exclude=None,
columns=None,
coerce_float=False,
nrows=None,
):
"""
Convert structured or record ndarray to DataFrame.
Parameters
----------
data : ndarray (structured dtype), list of tuples, dict, or DataFrame
index : str, list of fields, array-like
Field of array to use as the index, alternately a specific set of
input labels to use.
exclude : sequence, default None
Columns or fields to exclude.
columns : sequence, default None
Column names to use. If the passed data do not have names
associated with them, this argument provides names for the
columns. Otherwise this argument indicates the order of the columns
in the result (any names not found in the data will become all-NA
columns).
coerce_float : bool, default False
Attempt to convert values of non-string, non-numeric objects (like
decimal.Decimal) to floating point, useful for SQL result sets.
nrows : int, default None
Number of rows to read if data is an iterator.
Returns
-------
DataFrame
"""
# Make a copy of the input columns so we can modify it
if columns is not None:
columns = ensure_index(columns)
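        # for iterator input, peek at the first row to pick up a structured
        # dtype, then materialize the remaining rows (up to `nrows` if given)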
if is_iterator(data):
if nrows == 0:
return cls()
try:
first_row = next(data)
except StopIteration:
return cls(index=index, columns=columns)
dtype = None
if hasattr(first_row, "dtype") and first_row.dtype.names:
dtype = first_row.dtype
values = [first_row]
if nrows is None:
values += data
else:
values.extend(itertools.islice(data, nrows - 1))
if dtype is not None:
data = np.array(values, dtype=dtype)
else:
data = values
if isinstance(data, dict):
if columns is None:
columns = arr_columns = ensure_index(sorted(data))
arrays = [data[k] for k in columns]
else:
arrays = []
arr_columns = []
for k, v in data.items():
if k in columns:
arr_columns.append(k)
arrays.append(v)
arrays, arr_columns = reorder_arrays(arrays, arr_columns, columns)
elif isinstance(data, (np.ndarray, DataFrame)):
arrays, columns = to_arrays(data, columns)
if columns is not None:
columns = ensure_index(columns)
arr_columns = columns
else:
arrays, arr_columns = to_arrays(data, columns, coerce_float=coerce_float)
arr_columns = ensure_index(arr_columns)
if columns is not None:
columns = ensure_index(columns)
else:
columns = arr_columns
if exclude is None:
exclude = set()
else:
exclude = set(exclude)
result_index = None
if index is not None:
if isinstance(index, str) or not hasattr(index, "__iter__"):
i = columns.get_loc(index)
exclude.add(index)
if len(arrays) > 0:
result_index = Index(arrays[i], name=index)
else:
result_index = Index([], name=index)
else:
try:
index_data = [arrays[arr_columns.get_loc(field)] for field in index]
except (KeyError, TypeError):
# raised by get_loc, see GH#29258
result_index = index
else:
result_index = ensure_index_from_sequences(index_data, names=index)
exclude.update(index)
if any(exclude):
arr_exclude = [x for x in exclude if x in arr_columns]
to_remove = [arr_columns.get_loc(col) for col in arr_exclude]
arrays = [v for i, v in enumerate(arrays) if i not in to_remove]
arr_columns = arr_columns.drop(arr_exclude)
columns = columns.drop(exclude)
mgr = arrays_to_mgr(arrays, arr_columns, result_index, columns)
return cls(mgr)
def to_records(self, index=True, column_dtypes=None, index_dtypes=None):
"""
Convert DataFrame to a NumPy record array.
Index will be included as the first field of the record array if
requested.
Parameters
----------
index : bool, default True
Include index in resulting record array, stored in 'index'
field or using the index label, if set.
column_dtypes : str, type, dict, default None
.. versionadded:: 0.24.0
If a string or type, the data type to store all columns. If
a dictionary, a mapping of column names and indices (zero-indexed)
to specific data types.
index_dtypes : str, type, dict, default None
.. versionadded:: 0.24.0
If a string or type, the data type to store all index levels. If
a dictionary, a mapping of index level names and indices
(zero-indexed) to specific data types.
This mapping is applied only if `index=True`.
Returns
-------
numpy.recarray
NumPy ndarray with the DataFrame labels as fields and each row
of the DataFrame as entries.
See Also
--------
DataFrame.from_records: Convert structured or record ndarray
to DataFrame.
numpy.recarray: An ndarray that allows field access using
attributes, analogous to typed columns in a
spreadsheet.
Examples
--------
>>> df = pd.DataFrame({'A': [1, 2], 'B': [0.5, 0.75]},
... index=['a', 'b'])
>>> df
A B
a 1 0.50
b 2 0.75
>>> df.to_records()
rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)],
dtype=[('index', 'O'), ('A', '<i8'), ('B', '<f8')])
If the DataFrame index has no label then the recarray field name
is set to 'index'. If the index has a label then this is used as the
field name:
>>> df.index = df.index.rename("I")
>>> df.to_records()
rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)],
dtype=[('I', 'O'), ('A', '<i8'), ('B', '<f8')])
The index can be excluded from the record array:
>>> df.to_records(index=False)
rec.array([(1, 0.5 ), (2, 0.75)],
dtype=[('A', '<i8'), ('B', '<f8')])
Data types can be specified for the columns:
>>> df.to_records(column_dtypes={"A": "int32"})
rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)],
dtype=[('I', 'O'), ('A', '<i4'), ('B', '<f8')])
As well as for the index:
>>> df.to_records(index_dtypes="<S2")
rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)],
dtype=[('I', 'S2'), ('A', '<i8'), ('B', '<f8')])
>>> index_dtypes = "<S{}".format(df.index.str.len().max())
>>> df.to_records(index_dtypes=index_dtypes)
rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)],
dtype=[('I', 'S1'), ('A', '<i8'), ('B', '<f8')])
"""
if index:
if isinstance(self.index, ABCMultiIndex):
# array of tuples to numpy cols. copy copy copy
ix_vals = list(map(np.array, zip(*self.index.values)))
else:
ix_vals = [self.index.values]
arrays = ix_vals + [self[c]._internal_get_values() for c in self.columns]
count = 0
index_names = list(self.index.names)
if isinstance(self.index, ABCMultiIndex):
for i, n in enumerate(index_names):
if n is None:
index_names[i] = f"level_{count}"
count += 1
elif index_names[0] is None:
index_names = ["index"]
names = [str(name) for name in itertools.chain(index_names, self.columns)]
else:
arrays = [self[c]._internal_get_values() for c in self.columns]
names = [str(c) for c in self.columns]
index_names = []
index_len = len(index_names)
formats = []
for i, v in enumerate(arrays):
index = i
# When the names and arrays are collected, we
# first collect those in the DataFrame's index,
# followed by those in its columns.
#
# Thus, the total length of the array is:
# len(index_names) + len(DataFrame.columns).
#
# This check allows us to see whether we are
# handling a name / array in the index or column.
if index < index_len:
dtype_mapping = index_dtypes
name = index_names[index]
else:
index -= index_len
dtype_mapping = column_dtypes
name = self.columns[index]
# We have a dictionary, so we get the data type
# associated with the index or column (which can
# be denoted by its name in the DataFrame or its
# position in DataFrame's array of indices or
# columns, whichever is applicable.
if is_dict_like(dtype_mapping):
if name in dtype_mapping:
dtype_mapping = dtype_mapping[name]
elif index in dtype_mapping:
dtype_mapping = dtype_mapping[index]
else:
dtype_mapping = None
# If no mapping can be found, use the array's
# dtype attribute for formatting.
#
# A valid dtype must either be a type or
# string naming a type.
if dtype_mapping is None:
formats.append(v.dtype)
elif isinstance(dtype_mapping, (type, np.dtype, str)):
formats.append(dtype_mapping)
else:
element = "row" if i < index_len else "column"
msg = ("Invalid dtype {dtype} specified for {element} {name}").format(
dtype=dtype_mapping, element=element, name=name
)
raise ValueError(msg)
return np.rec.fromarrays(arrays, dtype={"names": names, "formats": formats})
@classmethod
def _from_arrays(cls, arrays, columns, index, dtype=None):
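# Assemble a BlockManager from the given column arrays and wrap it in a
# new DataFrame; used internally as a fast-path constructor.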
mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype)
return cls(mgr)
def to_stata(
self,
fname,
convert_dates=None,
write_index=True,
byteorder=None,
time_stamp=None,
data_label=None,
variable_labels=None,
version=114,
convert_strl=None,
):
"""
Export DataFrame object to Stata dta format.
Writes the DataFrame to a Stata dataset file.
"dta" files contain a Stata dataset.
Parameters
----------
fname : str, buffer or path object
String, path object (pathlib.Path or py._path.local.LocalPath) or
object implementing a binary write() function. If using a buffer
then the buffer will not be automatically closed after the file
data has been written.
convert_dates : dict
Dictionary mapping columns containing datetime types to stata
internal format to use when writing the dates. Options are 'tc',
'td', 'tm', 'tw', 'th', 'tq', 'ty'. Column can be either an integer
or a name. Datetime columns that do not have a conversion type
specified will be converted to 'tc'. Raises NotImplementedError if
a datetime column has timezone information.
write_index : bool
Write the index to Stata dataset.
byteorder : str
Can be ">", "<", "little", or "big". Default is `sys.byteorder`.
time_stamp : datetime
A datetime to use as file creation date. Default is the current
time.
data_label : str, optional
A label for the data set. Must be 80 characters or smaller.
variable_labels : dict
Dictionary containing columns as keys and variable labels as
values. Each label must be 80 characters or smaller.
version : {114, 117}, default 114
Version to use in the output dta file. Version 114 can be read
by Stata 10 and later. Version 117 can be read by Stata 13
or later. Version 114 limits string variables to 244 characters or
fewer while 117 allows strings with lengths up to 2,000,000
characters.
.. versionadded:: 0.23.0
convert_strl : list, optional
List of column names to convert to string columns to Stata StrL
format. Only available if version is 117. Storing strings in the
StrL format can produce smaller dta files if strings have more than
8 characters and values are repeated.
.. versionadded:: 0.23.0
Raises
------
NotImplementedError
* If datetimes contain timezone information
* Column dtype is not representable in Stata
ValueError
* Columns listed in convert_dates are neither datetime64[ns]
nor datetime.datetime
* Column listed in convert_dates is not in DataFrame
* Categorical label contains more than 32,000 characters
See Also
--------
read_stata : Import Stata data files.
io.stata.StataWriter : Low-level writer for Stata data files.
io.stata.StataWriter117 : Low-level writer for version 117 files.
Examples
--------
>>> df = pd.DataFrame({'animal': ['falcon', 'parrot', 'falcon',
... 'parrot'],
... 'speed': [350, 18, 361, 15]})
>>> df.to_stata('animals.dta') # doctest: +SKIP
"""
kwargs = {}
if version not in (114, 117):
raise ValueError("Only formats 114 and 117 supported.")
if version == 114:
if convert_strl is not None:
raise ValueError("strl support is only available when using format 117")
from pandas.io.stata import StataWriter as statawriter
else:
from pandas.io.stata import StataWriter117 as statawriter
kwargs["convert_strl"] = convert_strl
writer = statawriter(
fname,
self,
convert_dates=convert_dates,
byteorder=byteorder,
time_stamp=time_stamp,
data_label=data_label,
write_index=write_index,
variable_labels=variable_labels,
**kwargs,
)
writer.write_file()
def to_feather(self, fname):
"""
Write out the binary feather-format for DataFrames.
Parameters
----------
fname : str
String file path.
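Examples
--------
A minimal illustrative write; the file name below is arbitrary and the
optional ``pyarrow`` dependency must be installed:
>>> df = pd.DataFrame({'a': [1, 2], 'b': [3.0, 4.0]})
>>> df.to_feather('df.feather')  # doctest: +SKIP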
"""
from pandas.io.feather_format import to_feather
to_feather(self, fname)
def to_parquet(
self,
fname,
engine="auto",
compression="snappy",
index=None,
partition_cols=None,
**kwargs,
):
"""
Write a DataFrame to the binary parquet format.
.. versionadded:: 0.21.0
This function writes the dataframe as a `parquet file
<https://parquet.apache.org/>`_. You can choose different parquet
backends, and have the option of compression. See
:ref:`the user guide <io.parquet>` for more details.
Parameters
----------
fname : str
File path or Root Directory path. Will be used as Root Directory
path while writing a partitioned dataset.
.. versionchanged:: 0.24.0
engine : {'auto', 'pyarrow', 'fastparquet'}, default 'auto'
Parquet library to use. If 'auto', then the option
``io.parquet.engine`` is used. The default ``io.parquet.engine``
behavior is to try 'pyarrow', falling back to 'fastparquet' if
'pyarrow' is unavailable.
compression : {'snappy', 'gzip', 'brotli', None}, default 'snappy'
Name of the compression to use. Use ``None`` for no compression.
index : bool, default None
If ``True``, include the dataframe's index(es) in the file output.
If ``False``, they will not be written to the file.
If ``None``, similar to ``True`` the dataframe's index(es)
will be saved. However, instead of being saved as values,
the RangeIndex will be stored as a range in the metadata so it
doesn't require much space and is faster. Other indexes will
be included as columns in the file output.
.. versionadded:: 0.24.0
partition_cols : list, optional, default None
Column names by which to partition the dataset.
Columns are partitioned in the order they are given.
.. versionadded:: 0.24.0
**kwargs
Additional arguments passed to the parquet library. See
:ref:`pandas io <io.parquet>` for more details.
See Also
--------
read_parquet : Read a parquet file.
DataFrame.to_csv : Write a csv file.
DataFrame.to_sql : Write to a sql table.
DataFrame.to_hdf : Write to hdf.
Notes
-----
This function requires either the `fastparquet
<https://pypi.org/project/fastparquet>`_ or `pyarrow
<https://arrow.apache.org/docs/python/>`_ library.
Examples
--------
>>> df = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]})
>>> df.to_parquet('df.parquet.gzip',
... compression='gzip') # doctest: +SKIP
>>> pd.read_parquet('df.parquet.gzip') # doctest: +SKIP
col1 col2
0 1 3
1 2 4
"""
from pandas.io.parquet import to_parquet
to_parquet(
self,
fname,
engine,
compression=compression,
index=index,
partition_cols=partition_cols,
**kwargs,
)
@Substitution(
header_type="bool",
header="Whether to print column labels, default True",
col_space_type="str or int",
col_space="The minimum width of each column in CSS length "
"units. An int is assumed to be px units.\n\n"
" .. versionadded:: 0.25.0\n"
" Ability to use str",
)
@Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring)
def to_html(
self,
buf=None,
columns=None,
col_space=None,
header=True,
index=True,
na_rep="NaN",
formatters=None,
float_format=None,
sparsify=None,
index_names=True,
justify=None,
max_rows=None,
max_cols=None,
show_dimensions=False,
decimal=".",
bold_rows=True,
classes=None,
escape=True,
notebook=False,
border=None,
table_id=None,
render_links=False,
encoding=None,
):
"""
Render a DataFrame as an HTML table.
%(shared_params)s
bold_rows : bool, default True
Make the row labels bold in the output.
classes : str or list or tuple, default None
CSS class(es) to apply to the resulting html table.
escape : bool, default True
Convert the characters <, >, and & to HTML-safe sequences.
notebook : {True, False}, default False
Whether the generated HTML is for IPython Notebook.
border : int
A ``border=border`` attribute is included in the opening
`<table>` tag. Default ``pd.options.display.html.border``.
encoding : str, default "utf-8"
Set character encoding.
.. versionadded:: 1.0
table_id : str, optional
A css id is included in the opening `<table>` tag if specified.
.. versionadded:: 0.23.0
render_links : bool, default False
Convert URLs to HTML links.
.. versionadded:: 0.24.0
%(returns)s
See Also
--------
to_string : Convert DataFrame to a string.
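Examples
--------
A minimal illustrative call; when ``buf`` is None the full HTML string is
returned:
>>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
>>> html = df.to_html()
>>> html.startswith('<table')
True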
"""
if justify is not None and justify not in fmt._VALID_JUSTIFY_PARAMETERS:
raise ValueError("Invalid value for justify parameter")
formatter = fmt.DataFrameFormatter(
self,
columns=columns,
col_space=col_space,
na_rep=na_rep,
formatters=formatters,
float_format=float_format,
sparsify=sparsify,
justify=justify,
index_names=index_names,
header=header,
index=index,
bold_rows=bold_rows,
escape=escape,
max_rows=max_rows,
max_cols=max_cols,
show_dimensions=show_dimensions,
decimal=decimal,
table_id=table_id,
render_links=render_links,
)
# TODO: a generic formatter would be in DataFrameFormatter
return formatter.to_html(
buf=buf,
classes=classes,
notebook=notebook,
border=border,
encoding=encoding,
)
# ----------------------------------------------------------------------
def info(
self, verbose=None, buf=None, max_cols=None, memory_usage=None, null_counts=None
):
"""
Print a concise summary of a DataFrame.
This method prints information about a DataFrame including
the index dtype and column dtypes, non-null values and memory usage.
Parameters
----------
verbose : bool, optional
Whether to print the full summary. By default, the setting in
``pandas.options.display.max_info_columns`` is followed.
buf : writable buffer, defaults to sys.stdout
Where to send the output. By default, the output is printed to
sys.stdout. Pass a writable buffer if you need to further process
the output.
max_cols : int, optional
When to switch from the verbose to the truncated output. If the
DataFrame has more than `max_cols` columns, the truncated output
is used. By default, the setting in
``pandas.options.display.max_info_columns`` is used.
memory_usage : bool, str, optional
Specifies whether total memory usage of the DataFrame
elements (including the index) should be displayed. By default,
this follows the ``pandas.options.display.memory_usage`` setting.
True always shows memory usage. False never shows memory usage.
A value of 'deep' is equivalent to "True with deep introspection".
Memory usage is shown in human-readable units (base-2
representation). Without deep introspection a memory estimation is
made based on column dtype and number of rows, assuming values
consume the same memory amount for corresponding dtypes. With deep
memory introspection, a real memory usage calculation is performed
at the cost of computational resources.
null_counts : bool, optional
Whether to show the non-null counts. By default, this is shown
only if the frame is smaller than
``pandas.options.display.max_info_rows`` and
``pandas.options.display.max_info_columns``. A value of True always
shows the counts, and False never shows the counts.
Returns
-------
None
This method prints a summary of a DataFrame and returns None.
See Also
--------
DataFrame.describe: Generate descriptive statistics of DataFrame
columns.
DataFrame.memory_usage: Memory usage of DataFrame columns.
Examples
--------
>>> int_values = [1, 2, 3, 4, 5]
>>> text_values = ['alpha', 'beta', 'gamma', 'delta', 'epsilon']
>>> float_values = [0.0, 0.25, 0.5, 0.75, 1.0]
>>> df = pd.DataFrame({"int_col": int_values, "text_col": text_values,
... "float_col": float_values})
>>> df
int_col text_col float_col
0 1 alpha 0.00
1 2 beta 0.25
2 3 gamma 0.50
3 4 delta 0.75
4 5 epsilon 1.00
Prints information of all columns:
>>> df.info(verbose=True)
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 5 entries, 0 to 4
Data columns (total 3 columns):
int_col 5 non-null int64
text_col 5 non-null object
float_col 5 non-null float64
dtypes: float64(1), int64(1), object(1)
memory usage: 248.0+ bytes
Prints a summary of the column count and dtypes, but no per-column
information:
>>> df.info(verbose=False)
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 5 entries, 0 to 4
Columns: 3 entries, int_col to float_col
dtypes: float64(1), int64(1), object(1)
memory usage: 248.0+ bytes
Pipe the output of DataFrame.info to a buffer instead of sys.stdout, get
the buffer content and write it to a text file:
>>> import io
>>> buffer = io.StringIO()
>>> df.info(buf=buffer)
>>> s = buffer.getvalue()
>>> with open("df_info.txt", "w",
... encoding="utf-8") as f: # doctest: +SKIP
... f.write(s)
260
The `memory_usage` parameter allows deep introspection mode, which is
especially useful for big DataFrames and for fine-tuning memory
optimization:
>>> random_strings_array = np.random.choice(['a', 'b', 'c'], 10 ** 6)
>>> df = pd.DataFrame({
... 'column_1': np.random.choice(['a', 'b', 'c'], 10 ** 6),
... 'column_2': np.random.choice(['a', 'b', 'c'], 10 ** 6),
... 'column_3': np.random.choice(['a', 'b', 'c'], 10 ** 6)
... })
>>> df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1000000 entries, 0 to 999999
Data columns (total 3 columns):
column_1 1000000 non-null object
column_2 1000000 non-null object
column_3 1000000 non-null object
dtypes: object(3)
memory usage: 22.9+ MB
>>> df.info(memory_usage='deep')
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1000000 entries, 0 to 999999
Data columns (total 3 columns):
column_1 1000000 non-null object
column_2 1000000 non-null object
column_3 1000000 non-null object
dtypes: object(3)
memory usage: 188.8 MB
"""
if buf is None: # pragma: no cover
buf = sys.stdout
lines = []
lines.append(str(type(self)))
lines.append(self.index._summary())
if len(self.columns) == 0:
lines.append("Empty {name}".format(name=type(self).__name__))
fmt.buffer_put_lines(buf, lines)
return
cols = self.columns
# hack
if max_cols is None:
max_cols = get_option("display.max_info_columns", len(self.columns) + 1)
max_rows = get_option("display.max_info_rows", len(self) + 1)
if null_counts is None:
show_counts = (len(self.columns) <= max_cols) and (len(self) < max_rows)
else:
show_counts = null_counts
exceeds_info_cols = len(self.columns) > max_cols
def _verbose_repr():
lines.append(f"Data columns (total {len(self.columns)} columns):")
space = max(len(pprint_thing(k)) for k in self.columns) + 4
counts = None
tmpl = "{count}{dtype}"
if show_counts:
counts = self.count()
if len(cols) != len(counts): # pragma: no cover
raise AssertionError(
"Columns must equal counts "
"({cols:d} != {counts:d})".format(
cols=len(cols), counts=len(counts)
)
)
tmpl = "{count} non-null {dtype}"
dtypes = self.dtypes
for i, col in enumerate(self.columns):
dtype = dtypes.iloc[i]
col = pprint_thing(col)
count = ""
if show_counts:
count = counts.iloc[i]
lines.append(
_put_str(col, space) + tmpl.format(count=count, dtype=dtype)
)
def _non_verbose_repr():
lines.append(self.columns._summary(name="Columns"))
def _sizeof_fmt(num, size_qualifier):
# returns size in human readable format
for x in ["bytes", "KB", "MB", "GB", "TB"]:
if num < 1024.0:
return "{num:3.1f}{size_q} {x}".format(
num=num, size_q=size_qualifier, x=x
)
num /= 1024.0
return "{num:3.1f}{size_q} {pb}".format(
num=num, size_q=size_qualifier, pb="PB"
)
if verbose:
_verbose_repr()
elif verbose is False:  # specifically set to False, not merely None
_non_verbose_repr()
else:
if exceeds_info_cols:
_non_verbose_repr()
else:
_verbose_repr()
counts = self._data.get_dtype_counts()
dtypes = ["{k}({kk:d})".format(k=k[0], kk=k[1]) for k in sorted(counts.items())]
lines.append("dtypes: {types}".format(types=", ".join(dtypes)))
if memory_usage is None:
memory_usage = get_option("display.memory_usage")
if memory_usage:
# append memory usage of df to display
size_qualifier = ""
if memory_usage == "deep":
deep = True
else:
# size_qualifier is just a best effort; not guaranteed to catch
# all cases (e.g., it misses categorical data even with object
# categories)
deep = False
if "object" in counts or self.index._is_memory_usage_qualified():
size_qualifier = "+"
mem_usage = self.memory_usage(index=True, deep=deep).sum()
lines.append(
"memory usage: {mem}\n".format(
mem=_sizeof_fmt(mem_usage, size_qualifier)
)
)
fmt.buffer_put_lines(buf, lines)
def memory_usage(self, index=True, deep=False):
"""
Return the memory usage of each column in bytes.
The memory usage can optionally include the contribution of
the index and elements of `object` dtype.
This value is displayed in `DataFrame.info` by default. This can be
suppressed by setting ``pandas.options.display.memory_usage`` to False.
Parameters
----------
index : bool, default True
Specifies whether to include the memory usage of the DataFrame's
index in returned Series. If ``index=True``, the memory usage of
the index is the first item in the output.
deep : bool, default False
If True, introspect the data deeply by interrogating
`object` dtypes for system-level memory consumption, and include
it in the returned values.
Returns
-------
Series
A Series whose index is the original column names and whose values
are the memory usage of each column in bytes.
See Also
--------
numpy.ndarray.nbytes : Total bytes consumed by the elements of an
ndarray.
Series.memory_usage : Bytes consumed by a Series.
Categorical : Memory-efficient array for string values with
many repeated values.
DataFrame.info : Concise summary of a DataFrame.
Examples
--------
>>> dtypes = ['int64', 'float64', 'complex128', 'object', 'bool']
>>> data = dict([(t, np.ones(shape=5000).astype(t))
... for t in dtypes])
>>> df = pd.DataFrame(data)
>>> df.head()
int64 float64 complex128 object bool
0 1 1.0 1.000000+0.000000j 1 True
1 1 1.0 1.000000+0.000000j 1 True
2 1 1.0 1.000000+0.000000j 1 True
3 1 1.0 1.000000+0.000000j 1 True
4 1 1.0 1.000000+0.000000j 1 True
>>> df.memory_usage()
Index 128
int64 40000
float64 40000
complex128 80000
object 40000
bool 5000
dtype: int64
>>> df.memory_usage(index=False)
int64 40000
float64 40000
complex128 80000
object 40000
bool 5000
dtype: int64
By default the memory footprint of `object` dtype columns is estimated
shallowly; pass ``deep=True`` to include the actual object sizes:
>>> df.memory_usage(deep=True)
Index 128
int64 40000
float64 40000
complex128 80000
object 160000
bool 5000
dtype: int64
Use a Categorical for efficient storage of an object-dtype column with
many repeated values.
>>> df['object'].astype('category').memory_usage(deep=True)
5216
"""
result = Series(
[c.memory_usage(index=False, deep=deep) for col, c in self.items()],
index=self.columns,
)
if index:
result = Series(self.index.memory_usage(deep=deep), index=["Index"]).append(
result
)
return result
def transpose(self, *args, **kwargs):
"""
Transpose index and columns.
Reflect the DataFrame over its main diagonal by writing rows as columns
and vice-versa. The property :attr:`.T` is an accessor to the method
:meth:`transpose`.
Parameters
----------
*args, **kwargs
Additional arguments and keywords have no effect but might be
accepted for compatibility with numpy.
Returns
-------
DataFrame
The transposed DataFrame.
See Also
--------
numpy.transpose : Permute the dimensions of a given array.
Notes
-----
Transposing a DataFrame with mixed dtypes will result in a homogeneous
DataFrame with the `object` dtype. In such a case, a copy of the data
is always made.
Examples
--------
**Square DataFrame with homogeneous dtype**
>>> d1 = {'col1': [1, 2], 'col2': [3, 4]}
>>> df1 = pd.DataFrame(data=d1)
>>> df1
col1 col2
0 1 3
1 2 4
>>> df1_transposed = df1.T # or df1.transpose()
>>> df1_transposed
0 1
col1 1 2
col2 3 4
When the dtype is homogeneous in the original DataFrame, we get a
transposed DataFrame with the same dtype:
>>> df1.dtypes
col1 int64
col2 int64
dtype: object
>>> df1_transposed.dtypes
0 int64
1 int64
dtype: object
**Non-square DataFrame with mixed dtypes**
>>> d2 = {'name': ['Alice', 'Bob'],
... 'score': [9.5, 8],
... 'employed': [False, True],
... 'kids': [0, 0]}
>>> df2 = pd.DataFrame(data=d2)
>>> df2
name score employed kids
0 Alice 9.5 False 0
1 Bob 8.0 True 0
>>> df2_transposed = df2.T # or df2.transpose()
>>> df2_transposed
0 1
name Alice Bob
score 9.5 8
employed False True
kids 0 0
When the DataFrame has mixed dtypes, we get a transposed DataFrame with
the `object` dtype:
>>> df2.dtypes
name object
score float64
employed bool
kids int64
dtype: object
>>> df2_transposed.dtypes
0 object
1 object
dtype: object
"""
nv.validate_transpose(args, dict())
return super().transpose(1, 0, **kwargs)
T = property(transpose)
# ----------------------------------------------------------------------
# Indexing Methods
def _ixs(self, i: int, axis: int = 0):
"""
Parameters
----------
i : int
axis : int
Notes
-----
If slice passed, the resulting data will be a view.
"""
# irow
if axis == 0:
label = self.index[i]
new_values = self._data.fast_xs(i)
# if we are a copy, mark as such
copy = isinstance(new_values, np.ndarray) and new_values.base is None
result = self._constructor_sliced(
new_values,
index=self.columns,
name=self.index[i],
dtype=new_values.dtype,
)
result._set_is_copy(self, copy=copy)
return result
# icol
else:
label = self.columns[i]
# if the values returned are not the same length
# as the index (i.e., a not-found value), iget returns
# a 0-len ndarray. This is effectively catching
# a numpy error (as numpy should really raise)
values = self._data.iget(i)
if len(self.index) and not len(values):
values = np.array([np.nan] * len(self.index), dtype=object)
result = self._box_col_values(values, label)
# this is a cached value, mark it so
result._set_as_cached(label, self)
return result
def __getitem__(self, key):
key = lib.item_from_zerodim(key)
key = com.apply_if_callable(key, self)
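# Dispatch overview: a hashable key naming a unique column is returned
# directly (via the item cache or the MultiIndex column path); a slice or
# partial-date string selects rows; a boolean DataFrame is routed to
# ``where``; a 1d boolean indexer filters rows; anything else is treated
# as a (possibly list-like) column selection via ``take``.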
if is_hashable(key):
# shortcut if the key is in columns
if self.columns.is_unique and key in self.columns:
if self.columns.nlevels > 1:
return self._getitem_multilevel(key)
return self._get_item_cache(key)
# Do we have a slicer (on rows)?
indexer = convert_to_index_sliceable(self, key)
if indexer is not None:
# either we have a slice or we have a string that can be converted
# to a slice for partial-string date indexing
return self._slice(indexer, axis=0)
# Do we have a (boolean) DataFrame?
if isinstance(key, DataFrame):
return self.where(key)
# Do we have a (boolean) 1d indexer?
if com.is_bool_indexer(key):
return self._getitem_bool_array(key)
# We are left with two options: a single key, and a collection of keys,
# We interpret tuples as collections only for non-MultiIndex
is_single_key = isinstance(key, tuple) or not is_list_like(key)
if is_single_key:
if self.columns.nlevels > 1:
return self._getitem_multilevel(key)
indexer = self.columns.get_loc(key)
if is_integer(indexer):
indexer = [indexer]
else:
if is_iterator(key):
key = list(key)
indexer = self.loc._get_listlike_indexer(key, axis=1, raise_missing=True)[1]
# take() does not accept boolean indexers
if getattr(indexer, "dtype", None) == bool:
indexer = np.where(indexer)[0]
data = self.take(indexer, axis=1)
if is_single_key:
# What does looking for a single key in a non-unique index return?
# The behavior is inconsistent. It returns a Series, except when
# - the key itself is repeated (test on data.shape, #9519), or
# - we have a MultiIndex on columns (test on self.columns, #21309)
if data.shape[1] == 1 and not isinstance(self.columns, ABCMultiIndex):
data = data[key]
return data
def _getitem_bool_array(self, key):
# also raises Exception if object array with NA values
# warning here just in case -- previously __setitem__ was
# reindexing but __getitem__ was not; it seems more reasonable to
# go with the __setitem__ behavior since that is more consistent
# with all other indexing behavior
if isinstance(key, Series) and not key.index.equals(self.index):
warnings.warn(
"Boolean Series key will be reindexed to match DataFrame index.",
UserWarning,
stacklevel=3,
)
elif len(key) != len(self.index):
raise ValueError(
f"Item wrong length {len(key)} instead of {len(self.index)}."
)
# check_bool_indexer will throw exception if Series key cannot
# be reindexed to match DataFrame rows
key = check_bool_indexer(self.index, key)
indexer = key.nonzero()[0]
return self.take(indexer, axis=0)
def _getitem_multilevel(self, key):
# self.columns is a MultiIndex
loc = self.columns.get_loc(key)
if isinstance(loc, (slice, Series, np.ndarray, Index)):
new_columns = self.columns[loc]
result_columns = maybe_droplevels(new_columns, key)
if self._is_mixed_type:
result = self.reindex(columns=new_columns)
result.columns = result_columns
else:
new_values = self.values[:, loc]
result = self._constructor(
new_values, index=self.index, columns=result_columns
)
result = result.__finalize__(self)
# If there is only one column being returned, and its name is
# either an empty string, or a tuple with an empty string as its
# first element, then treat the empty string as a placeholder
# and return the column as if the user had provided that empty
# string in the key. If the result is a Series, exclude the
# implied empty string from its name.
if len(result.columns) == 1:
top = result.columns[0]
if isinstance(top, tuple):
top = top[0]
if top == "":
result = result[""]
if isinstance(result, Series):
result = self._constructor_sliced(
result, index=self.index, name=key
)
result._set_is_copy(self)
return result
else:
return self._get_item_cache(key)
def _get_value(self, index, col, takeable: bool = False):
"""
Quickly retrieve single value at passed column and index.
Parameters
----------
index : row label
col : column label
takeable : interpret the index/col as indexers, default False
Returns
-------
scalar
"""
if takeable:
series = self._iget_item_cache(col)
return com.maybe_box_datetimelike(series._values[index])
series = self._get_item_cache(col)
engine = self.index._engine
try:
return engine.get_value(series._values, index)
except KeyError:
# GH 20629
if self.index.nlevels > 1:
# partial indexing forbidden
raise
except (TypeError, ValueError):
pass
# we cannot handle direct indexing
# use positional
col = self.columns.get_loc(col)
index = self.index.get_loc(index)
return self._get_value(index, col, takeable=True)
def __setitem__(self, key, value):
key = com.apply_if_callable(key, self)
# see if we can slice the rows
indexer = convert_to_index_sliceable(self, key)
if indexer is not None:
# either we have a slice or we have a string that can be converted
# to a slice for partial-string date indexing
return self._setitem_slice(indexer, value)
if isinstance(key, DataFrame) or getattr(key, "ndim", None) == 2:
self._setitem_frame(key, value)
elif isinstance(key, (Series, np.ndarray, list, Index)):
self._setitem_array(key, value)
else:
# set column
self._set_item(key, value)
def _setitem_slice(self, key, value):
self._check_setitem_copy()
self.loc[key] = value
def _setitem_array(self, key, value):
# also raises Exception if object array with NA values
if com.is_bool_indexer(key):
if len(key) != len(self.index):
raise ValueError(
f"Item wrong length {len(key)} instead of {len(self.index)}!"
)
key = check_bool_indexer(self.index, key)
indexer = key.nonzero()[0]
self._check_setitem_copy()
self.loc._setitem_with_indexer(indexer, value)
else:
if isinstance(value, DataFrame):
if len(value.columns) != len(key):
raise ValueError("Columns must be same length as key")
for k1, k2 in zip(key, value.columns):
self[k1] = value[k2]
else:
indexer = self.loc._get_listlike_indexer(
key, axis=1, raise_missing=False
)[1]
self._check_setitem_copy()
self.loc._setitem_with_indexer((slice(None), indexer), value)
def _setitem_frame(self, key, value):
# support boolean setting with DataFrame input, e.g.
# df[df > df2] = 0
if isinstance(key, np.ndarray):
if key.shape != self.shape:
raise ValueError("Array conditional must be same shape as self")
key = self._constructor(key, **self._construct_axes_dict())
if key.values.size and not is_bool_dtype(key.values):
raise TypeError(
"Must pass DataFrame or 2-d ndarray with boolean values only"
)
self._check_inplace_setting(value)
self._check_setitem_copy()
self._where(-key, value, inplace=True)
def _set_item(self, key, value):
"""
Add series to DataFrame in specified column.
If series is a numpy-array (not a Series/TimeSeries), it must be the
same length as the DataFrame's index or an error will be thrown.
Series/TimeSeries will be conformed to the DataFrame's index to
ensure homogeneity.
"""
self._ensure_valid_index(value)
value = self._sanitize_column(key, value)
NDFrame._set_item(self, key, value)
# check if we are modifying a copy
# try to set first as we want an invalid
# value exception to occur first
if len(self):
self._check_setitem_copy()
def _set_value(self, index, col, value, takeable: bool = False):
"""
Put single value at passed column and index.
Parameters
----------
index : row label
col : column label
value : scalar
takeable : interpret the index/col as indexers, default False
Returns
-------
DataFrame
If the label pair is contained, a reference to the calling DataFrame
is returned; otherwise a new object.
"""
try:
if takeable is True:
series = self._iget_item_cache(col)
return series._set_value(index, value, takeable=True)
series = self._get_item_cache(col)
engine = self.index._engine
engine.set_value(series._values, index, value)
return self
except (KeyError, TypeError):
# set using a non-recursive method & reset the cache
if takeable:
self.iloc[index, col] = value
else:
self.loc[index, col] = value
self._item_cache.pop(col, None)
return self
def _ensure_valid_index(self, value):
"""
Ensure that if we don't have an index, that we can create one from the
passed value.
"""
# GH5632, make sure that we are a Series convertible
if not len(self.index) and is_list_like(value) and len(value):
try:
value = Series(value)
except (ValueError, NotImplementedError, TypeError):
raise ValueError(
"Cannot set a frame with no defined index "
"and a value that cannot be converted to a "
"Series"
)
self._data = self._data.reindex_axis(
value.index.copy(), axis=1, fill_value=np.nan
)
def _box_item_values(self, key, values):
items = self.columns[self.columns.get_loc(key)]
if values.ndim == 2:
return self._constructor(values.T, columns=items, index=self.index)
else:
return self._box_col_values(values, items)
def _box_col_values(self, values, items):
"""
Provide boxed values for a column.
"""
klass = self._constructor_sliced
return klass(values, index=self.index, name=items, fastpath=True)
# ----------------------------------------------------------------------
# Unsorted
def query(self, expr, inplace=False, **kwargs):
"""
Query the columns of a DataFrame with a boolean expression.
Parameters
----------
expr : str
The query string to evaluate. You can refer to variables
in the environment by prefixing them with an '@' character like
``@a + b``.
.. versionadded:: 0.25.0
You can refer to column names that contain spaces by surrounding
them in backticks.
For example, if one of your columns is called ``a a`` and you want
to sum it with ``b``, your query should be ```a a` + b``.
inplace : bool
Whether the query should modify the data in place or return
a modified copy.
**kwargs
See the documentation for :func:`eval` for complete details
on the keyword arguments accepted by :meth:`DataFrame.query`.
Returns
-------
DataFrame
DataFrame resulting from the provided query expression.
See Also
--------
eval : Evaluate a string describing operations on
DataFrame columns.
DataFrame.eval : Evaluate a string describing operations on
DataFrame columns.
Notes
-----
The result of the evaluation of this expression is first passed to
:attr:`DataFrame.loc` and if that fails because of a
multidimensional key (e.g., a DataFrame) then the result will be passed
to :meth:`DataFrame.__getitem__`.
This method uses the top-level :func:`eval` function to
evaluate the passed query.
The :meth:`~pandas.DataFrame.query` method uses a slightly
modified Python syntax by default. For example, the ``&`` and ``|``
(bitwise) operators have the precedence of their boolean cousins,
:keyword:`and` and :keyword:`or`. This *is* syntactically valid Python,
however the semantics are different.
You can change the semantics of the expression by passing the keyword
argument ``parser='python'``. This enforces the same semantics as
evaluation in Python space. Likewise, you can pass ``engine='python'``
to evaluate an expression using Python itself as a backend. This is not
recommended as it is inefficient compared to using ``numexpr`` as the
engine.
The :attr:`DataFrame.index` and
:attr:`DataFrame.columns` attributes of the
:class:`~pandas.DataFrame` instance are placed in the query namespace
by default, which allows you to treat both the index and columns of the
frame as a column in the frame.
The identifier ``index`` is used for the frame index; you can also
use the name of the index to identify it in a query. Please note that
Python keywords may not be used as identifiers.
For further details and examples see the ``query`` documentation in
:ref:`indexing <indexing.query>`.
Examples
--------
>>> df = pd.DataFrame({'A': range(1, 6),
... 'B': range(10, 0, -2),
... 'C C': range(10, 5, -1)})
>>> df
A B C C
0 1 10 10
1 2 8 9
2 3 6 8
3 4 4 7
4 5 2 6
>>> df.query('A > B')
A B C C
4 5 2 6
The previous expression is equivalent to
>>> df[df.A > df.B]
A B C C
4 5 2 6
For columns with spaces in their name, you can use backtick quoting.
>>> df.query('B == `C C`')
A B C C
0 1 10 10
The previous expression is equivalent to
>>> df[df.B == df['C C']]
A B C C
0 1 10 10
"""
inplace = validate_bool_kwarg(inplace, "inplace")
if not isinstance(expr, str):
msg = "expr must be a string to be evaluated, {0} given"
raise ValueError(msg.format(type(expr)))
kwargs["level"] = kwargs.pop("level", 0) + 1
kwargs["target"] = None
res = self.eval(expr, **kwargs)
try:
new_data = self.loc[res]
except ValueError:
# when res is multi-dimensional loc raises, but this is sometimes a
# valid query
new_data = self[res]
if inplace:
self._update_inplace(new_data)
else:
return new_data
def eval(self, expr, inplace=False, **kwargs):
"""
Evaluate a string describing operations on DataFrame columns.
Operates on columns only, not specific rows or elements. This allows
`eval` to run arbitrary code, which can make you vulnerable to code
injection if you pass user input to this function.
Parameters
----------
expr : str
The expression string to evaluate.
inplace : bool, default False
If the expression contains an assignment, whether to perform the
operation inplace and mutate the existing DataFrame. Otherwise,
a new DataFrame is returned.
**kwargs
See the documentation for :func:`eval` for complete details
on the keyword arguments accepted by
:meth:`~pandas.DataFrame.query`.
Returns
-------
ndarray, scalar, or pandas object
The result of the evaluation.
See Also
--------
DataFrame.query : Evaluates a boolean expression to query the columns
of a frame.
DataFrame.assign : Can evaluate an expression or function to create new
values for a column.
eval : Evaluate a Python expression as a string using various
backends.
Notes
-----
For more details see the API documentation for :func:`~eval`.
For detailed examples see :ref:`enhancing performance with eval
<enhancingperf.eval>`.
Examples
--------
>>> df = pd.DataFrame({'A': range(1, 6), 'B': range(10, 0, -2)})
>>> df
A B
0 1 10
1 2 8
2 3 6
3 4 4
4 5 2
>>> df.eval('A + B')
0 11
1 10
2 9
3 8
4 7
dtype: int64
Assignment is allowed though by default the original DataFrame is not
modified.
>>> df.eval('C = A + B')
A B C
0 1 10 11
1 2 8 10
2 3 6 9
3 4 4 8
4 5 2 7
>>> df
A B
0 1 10
1 2 8
2 3 6
3 4 4
4 5 2
Use ``inplace=True`` to modify the original DataFrame.
>>> df.eval('C = A + B', inplace=True)
>>> df
A B C
0 1 10 11
1 2 8 10
2 3 6 9
3 4 4 8
4 5 2 7
"""
from pandas.core.computation.eval import eval as _eval
inplace = validate_bool_kwarg(inplace, "inplace")
resolvers = kwargs.pop("resolvers", None)
kwargs["level"] = kwargs.pop("level", 0) + 1
if resolvers is None:
index_resolvers = self._get_index_resolvers()
column_resolvers = self._get_space_character_free_column_resolvers()
resolvers = column_resolvers, index_resolvers
if "target" not in kwargs:
kwargs["target"] = self
kwargs["resolvers"] = kwargs.get("resolvers", ()) + tuple(resolvers)
return _eval(expr, inplace=inplace, **kwargs)
def select_dtypes(self, include=None, exclude=None):
"""
Return a subset of the DataFrame's columns based on the column dtypes.
Parameters
----------
include, exclude : scalar or list-like
A selection of dtypes or strings to be included/excluded. At least
one of these parameters must be supplied.
Returns
-------
DataFrame
The subset of the frame including the dtypes in ``include`` and
excluding the dtypes in ``exclude``.
Raises
------
ValueError
* If both of ``include`` and ``exclude`` are empty
* If ``include`` and ``exclude`` have overlapping elements
* If any kind of string dtype is passed in.
Notes
-----
* To select all *numeric* types, use ``np.number`` or ``'number'``
* To select strings you must use the ``object`` dtype, but note that
this will return *all* object dtype columns
* See the `numpy dtype hierarchy
<http://docs.scipy.org/doc/numpy/reference/arrays.scalars.html>`__
* To select datetimes, use ``np.datetime64``, ``'datetime'`` or
``'datetime64'``
* To select timedeltas, use ``np.timedelta64``, ``'timedelta'`` or
``'timedelta64'``
* To select Pandas categorical dtypes, use ``'category'``
* To select Pandas datetimetz dtypes, use ``'datetimetz'`` (new in
0.20.0) or ``'datetime64[ns, tz]'``
Examples
--------
>>> df = pd.DataFrame({'a': [1, 2] * 3,
... 'b': [True, False] * 3,
... 'c': [1.0, 2.0] * 3})
>>> df
a b c
0 1 True 1.0
1 2 False 2.0
2 1 True 1.0
3 2 False 2.0
4 1 True 1.0
5 2 False 2.0
>>> df.select_dtypes(include='bool')
b
0 True
1 False
2 True
3 False
4 True
5 False
>>> df.select_dtypes(include=['float64'])
c
0 1.0
1 2.0
2 1.0
3 2.0
4 1.0
5 2.0
>>> df.select_dtypes(exclude=['int'])
b c
0 True 1.0
1 False 2.0
2 True 1.0
3 False 2.0
4 True 1.0
5 False 2.0
"""
if not is_list_like(include):
include = (include,) if include is not None else ()
if not is_list_like(exclude):
exclude = (exclude,) if exclude is not None else ()
selection = (frozenset(include), frozenset(exclude))
if not any(selection):
raise ValueError("at least one of include or exclude must be nonempty")
# convert the myriad valid dtypes object to a single representation
include = frozenset(infer_dtype_from_object(x) for x in include)
exclude = frozenset(infer_dtype_from_object(x) for x in exclude)
for dtypes in (include, exclude):
invalidate_string_dtypes(dtypes)
# can't both include AND exclude!
if not include.isdisjoint(exclude):
raise ValueError(
"include and exclude overlap on {inc_ex}".format(
inc_ex=(include & exclude)
)
)
# We raise when both include and exclude are empty
# Hence, we can just shrink the columns we want to keep
keep_these = np.full(self.shape[1], True)
def extract_unique_dtypes_from_dtypes_set(
dtypes_set: FrozenSet[Dtype], unique_dtypes: np.ndarray
) -> List[Dtype]:
extracted_dtypes = [
unique_dtype
for unique_dtype in unique_dtypes
if issubclass(unique_dtype.type, tuple(dtypes_set)) # type: ignore
]
return extracted_dtypes
unique_dtypes = self.dtypes.unique()
if include:
included_dtypes = extract_unique_dtypes_from_dtypes_set(
include, unique_dtypes
)
keep_these &= self.dtypes.isin(included_dtypes)
if exclude:
excluded_dtypes = extract_unique_dtypes_from_dtypes_set(
exclude, unique_dtypes
)
keep_these &= ~self.dtypes.isin(excluded_dtypes)
return self.iloc[:, keep_these.values]
def insert(self, loc, column, value, allow_duplicates=False):
"""
Insert column into DataFrame at specified location.
Raises a ValueError if `column` is already contained in the DataFrame,
unless `allow_duplicates` is set to True.
Parameters
----------
loc : int
Insertion index. Must satisfy 0 <= loc <= len(columns).
column : str, number, or hashable object
Label of the inserted column.
value : int, Series, or array-like
allow_duplicates : bool, optional
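Examples
--------
A minimal illustrative in-place insertion:
>>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
>>> df.insert(1, "newcol", [99, 99])
>>> df
   col1  newcol  col2
0     1      99     3
1     2      99     4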
"""
self._ensure_valid_index(value)
value = self._sanitize_column(column, value, broadcast=False)
self._data.insert(loc, column, value, allow_duplicates=allow_duplicates)
def assign(self, **kwargs):
r"""
Assign new columns to a DataFrame.
Returns a new object with all original columns in addition to new ones.
Existing columns that are re-assigned will be overwritten.
Parameters
----------
**kwargs : dict of {str: callable or Series}
The column names are keywords. If the values are
callable, they are computed on the DataFrame and
assigned to the new columns. The callable must not
change input DataFrame (though pandas doesn't check it).
If the values are not callable, (e.g. a Series, scalar, or array),
they are simply assigned.
Returns
-------
DataFrame
A new DataFrame with the new columns in addition to
all the existing columns.
Notes
-----
Assigning multiple columns within the same ``assign`` is possible.
Later items in '\*\*kwargs' may refer to newly created or modified
columns in 'df'; items are computed and assigned into 'df' in order.
.. versionchanged:: 0.23.0
Keyword argument order is maintained.
Examples
--------
>>> df = pd.DataFrame({'temp_c': [17.0, 25.0]},
... index=['Portland', 'Berkeley'])
>>> df
temp_c
Portland 17.0
Berkeley 25.0
Where the value is a callable, evaluated on `df`:
>>> df.assign(temp_f=lambda x: x.temp_c * 9 / 5 + 32)
temp_c temp_f
Portland 17.0 62.6
Berkeley 25.0 77.0
Alternatively, the same behavior can be achieved by directly
referencing an existing Series or sequence:
>>> df.assign(temp_f=df['temp_c'] * 9 / 5 + 32)
temp_c temp_f
Portland 17.0 62.6
Berkeley 25.0 77.0
You can create multiple columns within the same assign where one
of the columns depends on another one defined within the same assign:
>>> df.assign(temp_f=lambda x: x['temp_c'] * 9 / 5 + 32,
... temp_k=lambda x: (x['temp_f'] + 459.67) * 5 / 9)
temp_c temp_f temp_k
Portland 17.0 62.6 290.15
Berkeley 25.0 77.0 298.15
"""
data = self.copy()
for k, v in kwargs.items():
data[k] = com.apply_if_callable(v, data)
return data
def _sanitize_column(self, key, value, broadcast=True):
"""
Ensures new columns (which go into the BlockManager as new blocks) are
always copied and converted into an array.
Parameters
----------
key : object
value : scalar, Series, or array-like
broadcast : bool, default True
If ``key`` matches multiple duplicate column names in the
DataFrame, this parameter indicates whether ``value`` should be
tiled so that the returned array contains a (duplicated) column for
each occurrence of the key. If False, ``value`` will not be tiled.
Returns
-------
numpy.ndarray
"""
def reindexer(value):
# reindex if necessary
if value.index.equals(self.index) or not len(self.index):
value = value._values.copy()
else:
# GH 4107
try:
value = value.reindex(self.index)._values
except ValueError as err:
# raised in MultiIndex.from_tuples, see test_insert_error_msmgs
if not value.index.is_unique:
# duplicate axis
raise err
# other
raise TypeError(
"incompatible index of inserted column with frame index"
)
return value
if isinstance(value, Series):
value = reindexer(value)
elif isinstance(value, DataFrame):
# align right-hand-side columns if self.columns
# is multi-index and self[key] is a sub-frame
if isinstance(self.columns, ABCMultiIndex) and key in self.columns:
loc = self.columns.get_loc(key)
if isinstance(loc, (slice, Series, np.ndarray, Index)):
cols = maybe_droplevels(self.columns[loc], key)
if len(cols) and not cols.equals(value.columns):
value = value.reindex(cols, axis=1)
# now align rows
value = reindexer(value).T
elif isinstance(value, ExtensionArray):
# Explicitly copy here, instead of in sanitize_index,
# as sanitize_index won't copy an EA, even with copy=True
value = value.copy()
value = sanitize_index(value, self.index, copy=False)
elif isinstance(value, Index) or is_sequence(value):
# turn me into an ndarray
value = sanitize_index(value, self.index, copy=False)
if not isinstance(value, (np.ndarray, Index)):
if isinstance(value, list) and len(value) > 0:
value = maybe_convert_platform(value)
else:
value = com.asarray_tuplesafe(value)
elif value.ndim == 2:
value = value.copy().T
elif isinstance(value, Index):
value = value.copy(deep=True)
else:
value = value.copy()
# possibly infer to datetimelike
if is_object_dtype(value.dtype):
value = maybe_infer_to_datetimelike(value)
else:
# cast ignores pandas dtypes. so save the dtype first
infer_dtype, _ = infer_dtype_from_scalar(value, pandas_dtype=True)
# upcast
value = cast_scalar_to_array(len(self.index), value)
value = maybe_cast_to_datetime(value, infer_dtype)
# return internal types directly
if is_extension_array_dtype(value):
return value
# broadcast across multiple columns if necessary
if broadcast and key in self.columns and value.ndim == 1:
if not self.columns.is_unique or isinstance(self.columns, ABCMultiIndex):
existing_piece = self[key]
if isinstance(existing_piece, DataFrame):
value = np.tile(value, (len(existing_piece.columns), 1))
return np.atleast_2d(np.asarray(value))
@property
def _series(self):
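# Mapping of column label -> Series built from the corresponding internal
# block values; an internal convenience view over the frame's columns.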
return {
item: Series(self._data.iget(idx), index=self.index, name=item)
for idx, item in enumerate(self.columns)
}
def lookup(self, row_labels, col_labels):
"""
Label-based "fancy indexing" function for DataFrame.
Given equal-length arrays of row and column labels, return an
array of the values corresponding to each (row, col) pair.
Parameters
----------
row_labels : sequence
The row labels to use for lookup.
col_labels : sequence
The column labels to use for lookup.
Returns
-------
numpy.ndarray
The found values.
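Examples
--------
A minimal illustrative lookup; the frame and labels below are assumed for
demonstration only:
>>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]},
...                   index=['x', 'y', 'z'])
>>> df.lookup(['x', 'z'], ['B', 'A'])
array([4, 3])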
"""
n = len(row_labels)
if n != len(col_labels):
raise ValueError("Row labels must have same size as column labels")
thresh = 1000
if not self._is_mixed_type or n > thresh:
values = self.values
ridx = self.index.get_indexer(row_labels)
cidx = self.columns.get_indexer(col_labels)
if (ridx == -1).any():
raise KeyError("One or more row labels was not found")
if (cidx == -1).any():
raise KeyError("One or more column labels was not found")
flat_index = ridx * len(self.columns) + cidx
result = values.flat[flat_index]
else:
result = np.empty(n, dtype="O")
for i, (r, c) in enumerate(zip(row_labels, col_labels)):
result[i] = self._get_value(r, c)
if is_object_dtype(result):
result = lib.maybe_convert_objects(result)
return result
# ----------------------------------------------------------------------
# Reindexing and alignment
def _reindex_axes(self, axes, level, limit, tolerance, method, fill_value, copy):
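# Reindex the columns first, then the index, delegating to the per-axis
# helpers defined below.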
frame = self
columns = axes["columns"]
if columns is not None:
frame = frame._reindex_columns(
columns, method, copy, level, fill_value, limit, tolerance
)
index = axes["index"]
if index is not None:
frame = frame._reindex_index(
index, method, copy, level, fill_value, limit, tolerance
)
return frame
def _reindex_index(
self,
new_index,
method,
copy,
level,
fill_value=np.nan,
limit=None,
tolerance=None,
):
new_index, indexer = self.index.reindex(
new_index, method=method, level=level, limit=limit, tolerance=tolerance
)
return self._reindex_with_indexers(
{0: [new_index, indexer]},
copy=copy,
fill_value=fill_value,
allow_dups=False,
)
def _reindex_columns(
self,
new_columns,
method,
copy,
level,
fill_value=None,
limit=None,
tolerance=None,
):
new_columns, indexer = self.columns.reindex(
new_columns, method=method, level=level, limit=limit, tolerance=tolerance
)
return self._reindex_with_indexers(
{1: [new_columns, indexer]},
copy=copy,
fill_value=fill_value,
allow_dups=False,
)
def _reindex_multi(self, axes, copy, fill_value):
"""
We are guaranteed non-Nones in the axes.
"""
new_index, row_indexer = self.index.reindex(axes["index"])
new_columns, col_indexer = self.columns.reindex(axes["columns"])
if row_indexer is not None and col_indexer is not None:
indexer = row_indexer, col_indexer
new_values = algorithms.take_2d_multi(
self.values, indexer, fill_value=fill_value
)
return self._constructor(new_values, index=new_index, columns=new_columns)
else:
return self._reindex_with_indexers(
{0: [new_index, row_indexer], 1: [new_columns, col_indexer]},
copy=copy,
fill_value=fill_value,
)
@Appender(_shared_docs["align"] % _shared_doc_kwargs)
def align(
self,
other,
join="outer",
axis=None,
level=None,
copy=True,
fill_value=None,
method=None,
limit=None,
fill_axis=0,
broadcast_axis=None,
):
return super().align(
other,
join=join,
axis=axis,
level=level,
copy=copy,
fill_value=fill_value,
method=method,
limit=limit,
fill_axis=fill_axis,
broadcast_axis=broadcast_axis,
)
@Substitution(**_shared_doc_kwargs)
@Appender(NDFrame.reindex.__doc__)
@rewrite_axis_style_signature(
"labels",
[
("method", None),
("copy", True),
("level", None),
("fill_value", np.nan),
("limit", None),
("tolerance", None),
],
)
def reindex(self, *args, **kwargs):
axes = validate_axis_style_args(self, args, kwargs, "labels", "reindex")
kwargs.update(axes)
# Pop these, since the values are in `kwargs` under different names
kwargs.pop("axis", None)
kwargs.pop("labels", None)
return super().reindex(**kwargs)
def drop(
self,
labels=None,
axis=0,
index=None,
columns=None,
level=None,
inplace=False,
errors="raise",
):
"""
Drop specified labels from rows or columns.
Remove rows or columns by specifying label names and corresponding
axis, or by specifying directly index or column names. When using a
multi-index, labels on different levels can be removed by specifying
the level.
Parameters
----------
labels : single label or list-like
Index or column labels to drop.
axis : {0 or 'index', 1 or 'columns'}, default 0
Whether to drop labels from the index (0 or 'index') or
columns (1 or 'columns').
index : single label or list-like
Alternative to specifying axis (``labels, axis=0``
is equivalent to ``index=labels``).
.. versionadded:: 0.21.0
columns : single label or list-like
Alternative to specifying axis (``labels, axis=1``
is equivalent to ``columns=labels``).
.. versionadded:: 0.21.0
level : int or level name, optional
For MultiIndex, level from which the labels will be removed.
inplace : bool, default False
If True, do operation inplace and return None.
errors : {'ignore', 'raise'}, default 'raise'
If 'ignore', suppress error and only existing labels are
dropped.
Returns
-------
DataFrame
DataFrame without the removed index or column labels.
Raises
------
KeyError
If any of the labels is not found in the selected axis.
See Also
--------
DataFrame.loc : Label-location based indexer for selection by label.
DataFrame.dropna : Return DataFrame with labels on given axis omitted
where (all or any) data are missing.
DataFrame.drop_duplicates : Return DataFrame with duplicate rows
removed, optionally only considering certain columns.
Series.drop : Return Series with specified index labels removed.
Examples
--------
>>> df = pd.DataFrame(np.arange(12).reshape(3, 4),
... columns=['A', 'B', 'C', 'D'])
>>> df
A B C D
0 0 1 2 3
1 4 5 6 7
2 8 9 10 11
Drop columns
>>> df.drop(['B', 'C'], axis=1)
A D
0 0 3
1 4 7
2 8 11
>>> df.drop(columns=['B', 'C'])
A D
0 0 3
1 4 7
2 8 11
Drop a row by index
>>> df.drop([0, 1])
A B C D
2 8 9 10 11
Drop columns and/or rows of MultiIndex DataFrame
>>> midx = pd.MultiIndex(levels=[['lama', 'cow', 'falcon'],
... ['speed', 'weight', 'length']],
... codes=[[0, 0, 0, 1, 1, 1, 2, 2, 2],
... [0, 1, 2, 0, 1, 2, 0, 1, 2]])
>>> df = pd.DataFrame(index=midx, columns=['big', 'small'],
... data=[[45, 30], [200, 100], [1.5, 1], [30, 20],
... [250, 150], [1.5, 0.8], [320, 250],
... [1, 0.8], [0.3, 0.2]])
>>> df
big small
lama speed 45.0 30.0
weight 200.0 100.0
length 1.5 1.0
cow speed 30.0 20.0
weight 250.0 150.0
length 1.5 0.8
falcon speed 320.0 250.0
weight 1.0 0.8
length 0.3 0.2
>>> df.drop(index='cow', columns='small')
big
lama speed 45.0
weight 200.0
length 1.5
falcon speed 320.0
weight 1.0
length 0.3
>>> df.drop(index='length', level=1)
big small
lama speed 45.0 30.0
weight 200.0 100.0
cow speed 30.0 20.0
weight 250.0 150.0
falcon speed 320.0 250.0
weight 1.0 0.8
"""
return super().drop(
labels=labels,
axis=axis,
index=index,
columns=columns,
level=level,
inplace=inplace,
errors=errors,
)
@rewrite_axis_style_signature(
"mapper",
[("copy", True), ("inplace", False), ("level", None), ("errors", "ignore")],
)
def rename(self, *args, **kwargs):
"""
Alter axes labels.
Function / dict values must be unique (1-to-1). Labels not contained in
a dict / Series will be left as-is. Extra labels listed don't throw an
error.
See the :ref:`user guide <basics.rename>` for more.
Parameters
----------
mapper : dict-like or function
Dict-like or functions transformations to apply to
that axis' values. Use either ``mapper`` and ``axis`` to
specify the axis to target with ``mapper``, or ``index`` and
``columns``.
index : dict-like or function
Alternative to specifying axis (``mapper, axis=0``
is equivalent to ``index=mapper``).
columns : dict-like or function
Alternative to specifying axis (``mapper, axis=1``
is equivalent to ``columns=mapper``).
axis : int or str
Axis to target with ``mapper``. Can be either the axis name
('index', 'columns') or number (0, 1). The default is 'index'.
copy : bool, default True
Also copy underlying data.
inplace : bool, default False
Whether to return a new DataFrame. If True then value of copy is
ignored.
level : int or level name, default None
In case of a MultiIndex, only rename labels in the specified
level.
errors : {'ignore', 'raise'}, default 'ignore'
If 'raise', raise a `KeyError` when a dict-like `mapper`, `index`,
or `columns` contains labels that are not present in the Index
being transformed.
If 'ignore', existing keys will be renamed and extra keys will be
ignored.
Returns
-------
DataFrame
DataFrame with the renamed axis labels.
Raises
------
KeyError
If any of the labels is not found in the selected axis and
"errors='raise'".
See Also
--------
DataFrame.rename_axis : Set the name of the axis.
Examples
--------
``DataFrame.rename`` supports two calling conventions
* ``(index=index_mapper, columns=columns_mapper, ...)``
* ``(mapper, axis={'index', 'columns'}, ...)``
We *highly* recommend using keyword arguments to clarify your
intent.
Rename columns using a mapping:
>>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
>>> df.rename(columns={"A": "a", "B": "c"})
a c
0 1 4
1 2 5
2 3 6
Rename index using a mapping:
>>> df.rename(index={0: "x", 1: "y", 2: "z"})
A B
x 1 4
y 2 5
z 3 6
Cast index labels to a different type:
>>> df.index
RangeIndex(start=0, stop=3, step=1)
>>> df.rename(index=str).index
Index(['0', '1', '2'], dtype='object')
>>> df.rename(columns={"A": "a", "B": "b", "C": "c"}, errors="raise")
Traceback (most recent call last):
KeyError: ['C'] not found in axis
Using axis-style parameters
>>> df.rename(str.lower, axis='columns')
a b
0 1 4
1 2 5
2 3 6
>>> df.rename({1: 2, 2: 4}, axis='index')
A B
0 1 4
2 2 5
4 3 6
"""
axes = validate_axis_style_args(self, args, kwargs, "mapper", "rename")
kwargs.update(axes)
# Pop these, since the values are in `kwargs` under different names
kwargs.pop("axis", None)
kwargs.pop("mapper", None)
return super().rename(**kwargs)
@Substitution(**_shared_doc_kwargs)
@Appender(NDFrame.fillna.__doc__)
def fillna(
self,
value=None,
method=None,
axis=None,
inplace=False,
limit=None,
downcast=None,
**kwargs,
):
return super().fillna(
value=value,
method=method,
axis=axis,
inplace=inplace,
limit=limit,
downcast=downcast,
**kwargs,
)
@Appender(_shared_docs["replace"] % _shared_doc_kwargs)
def replace(
self,
to_replace=None,
value=None,
inplace=False,
limit=None,
regex=False,
method="pad",
):
return super().replace(
to_replace=to_replace,
value=value,
inplace=inplace,
limit=limit,
regex=regex,
method=method,
)
@Appender(_shared_docs["shift"] % _shared_doc_kwargs)
def shift(self, periods=1, freq=None, axis=0, fill_value=None):
return super().shift(
periods=periods, freq=freq, axis=axis, fill_value=fill_value
)
def set_index(
self, keys, drop=True, append=False, inplace=False, verify_integrity=False
):
"""
Set the DataFrame index using existing columns.
Set the DataFrame index (row labels) using one or more existing
columns or arrays (of the correct length). The index can replace the
existing index or expand on it.
Parameters
----------
keys : label or array-like or list of labels/arrays
This parameter can be either a single column key, a single array of
the same length as the calling DataFrame, or a list containing an
arbitrary combination of column keys and arrays. Here, "array"
encompasses :class:`Series`, :class:`Index`, ``np.ndarray``, and
instances of :class:`~collections.abc.Iterator`.
drop : bool, default True
Delete columns to be used as the new index.
append : bool, default False
Whether to append columns to existing index.
inplace : bool, default False
Modify the DataFrame in place (do not create a new object).
verify_integrity : bool, default False
Check the new index for duplicates. Otherwise defer the check until
necessary. Setting to False will improve the performance of this
method.
Returns
-------
DataFrame
Changed row labels.
See Also
--------
DataFrame.reset_index : Opposite of set_index.
DataFrame.reindex : Change to new indices or expand indices.
DataFrame.reindex_like : Change to same indices as other DataFrame.
Examples
--------
>>> df = pd.DataFrame({'month': [1, 4, 7, 10],
... 'year': [2012, 2014, 2013, 2014],
... 'sale': [55, 40, 84, 31]})
>>> df
month year sale
0 1 2012 55
1 4 2014 40
2 7 2013 84
3 10 2014 31
Set the index to become the 'month' column:
>>> df.set_index('month')
year sale
month
1 2012 55
4 2014 40
7 2013 84
10 2014 31
Create a MultiIndex using columns 'year' and 'month':
>>> df.set_index(['year', 'month'])
sale
year month
2012 1 55
2014 4 40
2013 7 84
2014 10 31
Create a MultiIndex using an Index and a column:
>>> df.set_index([pd.Index([1, 2, 3, 4]), 'year'])
month sale
year
1 2012 1 55
2 2014 4 40
3 2013 7 84
4 2014 10 31
Create a MultiIndex using two Series:
>>> s = pd.Series([1, 2, 3, 4])
>>> df.set_index([s, s**2])
month year sale
1 1 1 2012 55
2 4 4 2014 40
3 9 7 2013 84
4 16 10 2014 31
"""
inplace = validate_bool_kwarg(inplace, "inplace")
if not isinstance(keys, list):
keys = [keys]
err_msg = (
'The parameter "keys" may be a column key, one-dimensional '
"array, or a list containing only valid column keys and "
"one-dimensional arrays."
)
missing = []
for col in keys:
if isinstance(
col, (ABCIndexClass, ABCSeries, np.ndarray, list, abc.Iterator)
):
# arrays are fine as long as they are one-dimensional
# iterators get converted to list below
if getattr(col, "ndim", 1) != 1:
raise ValueError(err_msg)
else:
# everything else gets tried as a key; see GH 24969
try:
found = col in self.columns
except TypeError:
raise TypeError(
err_msg + " Received column of type {}".format(type(col))
)
else:
if not found:
missing.append(col)
if missing:
raise KeyError("None of {} are in the columns".format(missing))
if inplace:
frame = self
else:
frame = self.copy()
arrays = []
names = []
if append:
names = list(self.index.names)
if isinstance(self.index, ABCMultiIndex):
for i in range(self.index.nlevels):
arrays.append(self.index._get_level_values(i))
else:
arrays.append(self.index)
to_remove = []
for col in keys:
if isinstance(col, ABCMultiIndex):
for n in range(col.nlevels):
arrays.append(col._get_level_values(n))
names.extend(col.names)
elif isinstance(col, (ABCIndexClass, ABCSeries)):
# if Index then not MultiIndex (treated above)
arrays.append(col)
names.append(col.name)
elif isinstance(col, (list, np.ndarray)):
arrays.append(col)
names.append(None)
elif isinstance(col, abc.Iterator):
arrays.append(list(col))
names.append(None)
# from here, col can only be a column label
else:
arrays.append(frame[col]._values)
names.append(col)
if drop:
to_remove.append(col)
if len(arrays[-1]) != len(self):
# check newest element against length of calling frame, since
# ensure_index_from_sequences would not raise for append=False.
raise ValueError(
"Length mismatch: Expected {len_self} rows, "
"received array of length {len_col}".format(
len_self=len(self), len_col=len(arrays[-1])
)
)
index = ensure_index_from_sequences(arrays, names)
if verify_integrity and not index.is_unique:
duplicates = index[index.duplicated()].unique()
raise ValueError("Index has duplicate keys: {dup}".format(dup=duplicates))
# use set to handle duplicate column names gracefully in case of drop
for c in set(to_remove):
del frame[c]
# clear up memory usage
index._cleanup()
frame.index = index
if not inplace:
return frame
def reset_index(
self, level=None, drop=False, inplace=False, col_level=0, col_fill=""
):
"""
Reset the index, or a level of it.
Reset the index of the DataFrame, and use the default one instead.
If the DataFrame has a MultiIndex, this method can remove one or more
levels.
Parameters
----------
level : int, str, tuple, or list, default None
Only remove the given levels from the index. Removes all levels by
default.
drop : bool, default False
Do not try to insert index into dataframe columns. This resets
the index to the default integer index.
inplace : bool, default False
Modify the DataFrame in place (do not create a new object).
col_level : int or str, default 0
If the columns have multiple levels, determines which level the
labels are inserted into. By default it is inserted into the first
level.
col_fill : object, default ''
If the columns have multiple levels, determines how the other
levels are named. If None then the index name is repeated.
Returns
-------
DataFrame
DataFrame with the new index.
See Also
--------
DataFrame.set_index : Opposite of reset_index.
DataFrame.reindex : Change to new indices or expand indices.
DataFrame.reindex_like : Change to same indices as other DataFrame.
Examples
--------
>>> df = pd.DataFrame([('bird', 389.0),
... ('bird', 24.0),
... ('mammal', 80.5),
... ('mammal', np.nan)],
... index=['falcon', 'parrot', 'lion', 'monkey'],
... columns=('class', 'max_speed'))
>>> df
class max_speed
falcon bird 389.0
parrot bird 24.0
lion mammal 80.5
monkey mammal NaN
When we reset the index, the old index is added as a column, and a
new sequential index is used:
>>> df.reset_index()
index class max_speed
0 falcon bird 389.0
1 parrot bird 24.0
2 lion mammal 80.5
3 monkey mammal NaN
We can use the `drop` parameter to avoid the old index being added as
a column:
>>> df.reset_index(drop=True)
class max_speed
0 bird 389.0
1 bird 24.0
2 mammal 80.5
3 mammal NaN
You can also use `reset_index` with `MultiIndex`.
>>> index = pd.MultiIndex.from_tuples([('bird', 'falcon'),
... ('bird', 'parrot'),
... ('mammal', 'lion'),
... ('mammal', 'monkey')],
... names=['class', 'name'])
>>> columns = pd.MultiIndex.from_tuples([('speed', 'max'),
... ('species', 'type')])
>>> df = pd.DataFrame([(389.0, 'fly'),
... ( 24.0, 'fly'),
... ( 80.5, 'run'),
... (np.nan, 'jump')],
... index=index,
... columns=columns)
>>> df
speed species
max type
class name
bird falcon 389.0 fly
parrot 24.0 fly
mammal lion 80.5 run
monkey NaN jump
If the index has multiple levels, we can reset a subset of them:
>>> df.reset_index(level='class')
class speed species
max type
name
falcon bird 389.0 fly
parrot bird 24.0 fly
lion mammal 80.5 run
monkey mammal NaN jump
If we are not dropping the index, by default, it is placed in the top
level. We can place it in another level:
>>> df.reset_index(level='class', col_level=1)
speed species
class max type
name
falcon bird 389.0 fly
parrot bird 24.0 fly
lion mammal 80.5 run
monkey mammal NaN jump
When the index is inserted under another level, we can specify under
which one with the parameter `col_fill`:
>>> df.reset_index(level='class', col_level=1, col_fill='species')
species speed species
class max type
name
falcon bird 389.0 fly
parrot bird 24.0 fly
lion mammal 80.5 run
monkey mammal NaN jump
If we specify a nonexistent level for `col_fill`, it is created:
>>> df.reset_index(level='class', col_level=1, col_fill='genus')
genus speed species
class max type
name
falcon bird 389.0 fly
parrot bird 24.0 fly
lion mammal 80.5 run
monkey mammal NaN jump
"""
inplace = validate_bool_kwarg(inplace, "inplace")
if inplace:
new_obj = self
else:
new_obj = self.copy()
def _maybe_casted_values(index, labels=None):
values = index._values
if not isinstance(index, (PeriodIndex, DatetimeIndex)):
if values.dtype == np.object_:
values = lib.maybe_convert_objects(values)
# if we have the labels, extract the values with a mask
if labels is not None:
mask = labels == -1
# we can have situations where the whole mask is -1,
# meaning nothing was found in labels, so make everything NaN
if mask.all():
values = np.empty(len(mask))
values.fill(np.nan)
else:
values = values.take(labels)
# TODO(https://github.com/pandas-dev/pandas/issues/24206)
# Push this into maybe_upcast_putmask?
# We can't pass EAs there right now. Looks a bit
# complicated.
# So we unbox the ndarray_values, op, re-box.
values_type = type(values)
values_dtype = values.dtype
if issubclass(values_type, DatetimeLikeArray):
values = values._data
if mask.any():
values, changed = maybe_upcast_putmask(values, mask, np.nan)
if issubclass(values_type, DatetimeLikeArray):
values = values_type(values, dtype=values_dtype)
return values
new_index = ibase.default_index(len(new_obj))
if level is not None:
if not isinstance(level, (tuple, list)):
level = [level]
level = [self.index._get_level_number(lev) for lev in level]
if len(level) < self.index.nlevels:
new_index = self.index.droplevel(level)
if not drop:
if isinstance(self.index, ABCMultiIndex):
names = [
(n if n is not None else f"level_{i}")
for i, n in enumerate(self.index.names)
]
to_insert = zip(self.index.levels, self.index.codes)
else:
default = "index" if "index" not in self else "level_0"
names = [default] if self.index.name is None else [self.index.name]
to_insert = ((self.index, None),)
multi_col = isinstance(self.columns, ABCMultiIndex)
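# Iterate in reverse so that, after repeated insert(0, ...), the
# outermost index level ends up as the leftmost inserted column.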
for i, (lev, lab) in reversed(list(enumerate(to_insert))):
if not (level is None or i in level):
continue
name = names[i]
if multi_col:
col_name = list(name) if isinstance(name, tuple) else [name]
if col_fill is None:
if len(col_name) not in (1, self.columns.nlevels):
raise ValueError(
"col_fill=None is incompatible "
"with incomplete column name "
"{}".format(name)
)
col_fill = col_name[0]
lev_num = self.columns._get_level_number(col_level)
name_lst = [col_fill] * lev_num + col_name
missing = self.columns.nlevels - len(name_lst)
name_lst += [col_fill] * missing
name = tuple(name_lst)
# to ndarray and maybe infer different dtype
level_values = _maybe_casted_values(lev, lab)
new_obj.insert(0, name, level_values)
new_obj.index = new_index
if not inplace:
return new_obj
# ----------------------------------------------------------------------
# Reindex-based selection methods
@Appender(_shared_docs["isna"] % _shared_doc_kwargs)
def isna(self):
return super().isna()
@Appender(_shared_docs["isna"] % _shared_doc_kwargs)
def isnull(self):
return super().isnull()
@Appender(_shared_docs["notna"] % _shared_doc_kwargs)
def notna(self):
return super().notna()
@Appender(_shared_docs["notna"] % _shared_doc_kwargs)
def notnull(self):
return super().notnull()
def dropna(self, axis=0, how="any", thresh=None, subset=None, inplace=False):
"""
Remove missing values.
See the :ref:`User Guide <missing_data>` for more on which values are
considered missing, and how to work with missing data.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
Determine if rows or columns which contain missing values are
removed.
* 0, or 'index' : Drop rows which contain missing values.
* 1, or 'columns' : Drop columns which contain missing value.
.. versionchanged:: 1.0.0
Passing a tuple or list to drop on multiple axes is no longer
supported; only a single axis is allowed.
how : {'any', 'all'}, default 'any'
Determine if row or column is removed from DataFrame, when we have
at least one NA or all NA.
* 'any' : If any NA values are present, drop that row or column.
* 'all' : If all values are NA, drop that row or column.
thresh : int, optional
Require that many non-NA values.
subset : array-like, optional
Labels along other axis to consider, e.g. if you are dropping rows
these would be a list of columns to include.
inplace : bool, default False
If True, do operation inplace and return None.
Returns
-------
DataFrame
DataFrame with NA entries dropped from it.
See Also
--------
DataFrame.isna: Indicate missing values.
DataFrame.notna : Indicate existing (non-missing) values.
DataFrame.fillna : Replace missing values.
Series.dropna : Drop missing values.
Index.dropna : Drop missing indices.
Examples
--------
>>> df = pd.DataFrame({"name": ['Alfred', 'Batman', 'Catwoman'],
... "toy": [np.nan, 'Batmobile', 'Bullwhip'],
... "born": [pd.NaT, pd.Timestamp("1940-04-25"),
... pd.NaT]})
>>> df
name toy born
0 Alfred NaN NaT
1 Batman Batmobile 1940-04-25
2 Catwoman Bullwhip NaT
Drop the rows where at least one element is missing.
>>> df.dropna()
name toy born
1 Batman Batmobile 1940-04-25
Drop the columns where at least one element is missing.
>>> df.dropna(axis='columns')
name
0 Alfred
1 Batman
2 Catwoman
Drop the rows where all elements are missing.
>>> df.dropna(how='all')
name toy born
0 Alfred NaN NaT
1 Batman Batmobile 1940-04-25
2 Catwoman Bullwhip NaT
Keep only the rows with at least 2 non-NA values.
>>> df.dropna(thresh=2)
name toy born
1 Batman Batmobile 1940-04-25
2 Catwoman Bullwhip NaT
Define in which columns to look for missing values.
>>> df.dropna(subset=['name', 'born'])
name toy born
1 Batman Batmobile 1940-04-25
Keep the DataFrame with valid entries in the same variable.
>>> df.dropna(inplace=True)
>>> df
name toy born
1 Batman Batmobile 1940-04-25
"""
inplace = validate_bool_kwarg(inplace, "inplace")
if isinstance(axis, (tuple, list)):
# GH20987
raise TypeError("supplying multiple axes to axis is no longer supported.")
axis = self._get_axis_number(axis)
agg_axis = 1 - axis
agg_obj = self
if subset is not None:
ax = self._get_axis(agg_axis)
indices = ax.get_indexer_for(subset)
check = indices == -1
if check.any():
raise KeyError(list(np.compress(check, subset)))
agg_obj = self.take(indices, axis=agg_axis)
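# Count non-NA values along the opposite axis; the mask below keeps
# only the labels that satisfy `thresh` / `how`.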
count = agg_obj.count(axis=agg_axis)
if thresh is not None:
mask = count >= thresh
elif how == "any":
mask = count == len(agg_obj._get_axis(agg_axis))
elif how == "all":
mask = count > 0
else:
if how is not None:
raise ValueError("invalid how option: {h}".format(h=how))
else:
raise TypeError("must specify how or thresh")
result = self.loc(axis=axis)[mask]
if inplace:
self._update_inplace(result)
else:
return result
def drop_duplicates(self, subset=None, keep="first", inplace=False):
"""
Return DataFrame with duplicate rows removed.
Considering certain columns is optional. Indexes, including time indexes
are ignored.
Parameters
----------
subset : column label or sequence of labels, optional
Only consider certain columns for identifying duplicates, by
default use all of the columns.
keep : {'first', 'last', False}, default 'first'
Determines which duplicates (if any) to keep.
- ``first`` : Drop duplicates except for the first occurrence.
- ``last`` : Drop duplicates except for the last occurrence.
- False : Drop all duplicates.
inplace : bool, default False
Whether to drop duplicates in place or to return a copy.
Returns
-------
DataFrame
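Examples
--------
A minimal illustration on a small, hypothetical frame; with the
default ``keep='first'`` the first occurrence of each duplicated row
is kept and the original index labels are preserved:
>>> df = pd.DataFrame({'A': [1, 1, 2], 'B': ['x', 'x', 'y']})
>>> df.drop_duplicates()
A  B
0  1  x
2  2  y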
"""
if self.empty:
return self.copy()
inplace = validate_bool_kwarg(inplace, "inplace")
duplicated = self.duplicated(subset, keep=keep)
if inplace:
(inds,) = (-duplicated)._ndarray_values.nonzero()
new_data = self._data.take(inds)
self._update_inplace(new_data)
else:
return self[-duplicated]
def duplicated(self, subset=None, keep="first"):
"""
Return boolean Series denoting duplicate rows.
Considering certain columns is optional.
Parameters
----------
subset : column label or sequence of labels, optional
Only consider certain columns for identifying duplicates, by
default use all of the columns.
keep : {'first', 'last', False}, default 'first'
Determines which duplicates (if any) to mark.
- ``first`` : Mark duplicates as ``True`` except for the first occurrence.
- ``last`` : Mark duplicates as ``True`` except for the last occurrence.
- False : Mark all duplicates as ``True``.
Returns
-------
Series
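Examples
--------
A small, hypothetical example; with the default ``keep='first'`` only
the repeated occurrence is flagged:
>>> df = pd.DataFrame({'A': [1, 1, 2]})
>>> df.duplicated()
0    False
1     True
2    False
dtype: bool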
"""
from pandas.core.sorting import get_group_index
from pandas._libs.hashtable import duplicated_int64, _SIZE_HINT_LIMIT
if self.empty:
return Series(dtype=bool)
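# Factorize each selected column, combine the per-column codes into a
# single group id per row, then flag rows that share a group id.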
def f(vals):
labels, shape = algorithms.factorize(
vals, size_hint=min(len(self), _SIZE_HINT_LIMIT)
)
return labels.astype("i8", copy=False), len(shape)
if subset is None:
subset = self.columns
elif (
not np.iterable(subset)
or isinstance(subset, str)
or isinstance(subset, tuple)
and subset in self.columns
):
subset = (subset,)
# Verify all columns in subset exist in the queried dataframe
# Otherwise, raise a KeyError, same as if you try to __getitem__ with a
# key that doesn't exist.
diff = Index(subset).difference(self.columns)
if not diff.empty:
raise KeyError(diff)
vals = (col.values for name, col in self.items() if name in subset)
labels, shape = map(list, zip(*map(f, vals)))
ids = get_group_index(labels, shape, sort=False, xnull=False)
return Series(duplicated_int64(ids, keep), index=self.index)
# ----------------------------------------------------------------------
# Sorting
@Substitution(**_shared_doc_kwargs)
@Appender(NDFrame.sort_values.__doc__)
def sort_values(
self,
by,
axis=0,
ascending=True,
inplace=False,
kind="quicksort",
na_position="last",
):
inplace = validate_bool_kwarg(inplace, "inplace")
axis = self._get_axis_number(axis)
if not isinstance(by, list):
by = [by]
if is_sequence(ascending) and len(by) != len(ascending):
raise ValueError(
f"Length of ascending ({len(ascending)}) != length of by ({len(by)})"
)
if len(by) > 1:
from pandas.core.sorting import lexsort_indexer
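# Lexicographic sort over all keys: earlier entries in `by` are the
# primary keys, later ones break ties.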
keys = [self._get_label_or_level_values(x, axis=axis) for x in by]
indexer = lexsort_indexer(keys, orders=ascending, na_position=na_position)
indexer = ensure_platform_int(indexer)
else:
from pandas.core.sorting import nargsort
by = by[0]
k = self._get_label_or_level_values(by, axis=axis)
if isinstance(ascending, (tuple, list)):
ascending = ascending[0]
indexer = nargsort(
k, kind=kind, ascending=ascending, na_position=na_position
)
new_data = self._data.take(
indexer, axis=self._get_block_manager_axis(axis), verify=False
)
if inplace:
return self._update_inplace(new_data)
else:
return self._constructor(new_data).__finalize__(self)
@Substitution(**_shared_doc_kwargs)
@Appender(NDFrame.sort_index.__doc__)
def sort_index(
self,
axis=0,
level=None,
ascending=True,
inplace=False,
kind="quicksort",
na_position="last",
sort_remaining=True,
):
# TODO: this can be combined with Series.sort_index impl as
# almost identical
inplace = validate_bool_kwarg(inplace, "inplace")
axis = self._get_axis_number(axis)
labels = self._get_axis(axis)
# make sure that the axis is lexsorted to start
# if not we need to reconstruct to get the correct indexer
labels = labels._sort_levels_monotonic()
if level is not None:
new_axis, indexer = labels.sortlevel(
level, ascending=ascending, sort_remaining=sort_remaining
)
elif isinstance(labels, ABCMultiIndex):
from pandas.core.sorting import lexsort_indexer
indexer = lexsort_indexer(
labels._get_codes_for_sorting(),
orders=ascending,
na_position=na_position,
)
else:
from pandas.core.sorting import nargsort
# Check monotonic-ness before sort an index
# GH11080
if (ascending and labels.is_monotonic_increasing) or (
not ascending and labels.is_monotonic_decreasing
):
if inplace:
return
else:
return self.copy()
indexer = nargsort(
labels, kind=kind, ascending=ascending, na_position=na_position
)
baxis = self._get_block_manager_axis(axis)
new_data = self._data.take(indexer, axis=baxis, verify=False)
# reconstruct axis if needed
new_data.axes[baxis] = new_data.axes[baxis]._sort_levels_monotonic()
if inplace:
return self._update_inplace(new_data)
else:
return self._constructor(new_data).__finalize__(self)
def nlargest(self, n, columns, keep="first"):
"""
Return the first `n` rows ordered by `columns` in descending order.
Return the first `n` rows with the largest values in `columns`, in
descending order. The columns that are not specified are returned as
well, but not used for ordering.
This method is equivalent to
``df.sort_values(columns, ascending=False).head(n)``, but more
performant.
Parameters
----------
n : int
Number of rows to return.
columns : label or list of labels
Column label(s) to order by.
keep : {'first', 'last', 'all'}, default 'first'
Where there are duplicate values:
- ``first`` : prioritize the first occurrence(s)
- ``last`` : prioritize the last occurrence(s)
- ``all`` : do not drop any duplicates, even if it means
selecting more than `n` items.
.. versionadded:: 0.24.0
Returns
-------
DataFrame
The first `n` rows ordered by the given columns in descending
order.
See Also
--------
DataFrame.nsmallest : Return the first `n` rows ordered by `columns` in
ascending order.
DataFrame.sort_values : Sort DataFrame by the values.
DataFrame.head : Return the first `n` rows without re-ordering.
Notes
-----
This function cannot be used with all column types. For example, when
specifying columns with `object` or `category` dtypes, ``TypeError`` is
raised.
Examples
--------
>>> df = pd.DataFrame({'population': [59000000, 65000000, 434000,
... 434000, 434000, 337000, 11300,
... 11300, 11300],
... 'GDP': [1937894, 2583560 , 12011, 4520, 12128,
... 17036, 182, 38, 311],
... 'alpha-2': ["IT", "FR", "MT", "MV", "BN",
... "IS", "NR", "TV", "AI"]},
... index=["Italy", "France", "Malta",
... "Maldives", "Brunei", "Iceland",
... "Nauru", "Tuvalu", "Anguilla"])
>>> df
population GDP alpha-2
Italy 59000000 1937894 IT
France 65000000 2583560 FR
Malta 434000 12011 MT
Maldives 434000 4520 MV
Brunei 434000 12128 BN
Iceland 337000 17036 IS
Nauru 11300 182 NR
Tuvalu 11300 38 TV
Anguilla 11300 311 AI
In the following example, we will use ``nlargest`` to select the three
rows having the largest values in column "population".
>>> df.nlargest(3, 'population')
population GDP alpha-2
France 65000000 2583560 FR
Italy 59000000 1937894 IT
Malta 434000 12011 MT
When using ``keep='last'``, ties are resolved in reverse order:
>>> df.nlargest(3, 'population', keep='last')
population GDP alpha-2
France 65000000 2583560 FR
Italy 59000000 1937894 IT
Brunei 434000 12128 BN
When using ``keep='all'``, all duplicate items are maintained:
>>> df.nlargest(3, 'population', keep='all')
population GDP alpha-2
France 65000000 2583560 FR
Italy 59000000 1937894 IT
Malta 434000 12011 MT
Maldives 434000 4520 MV
Brunei 434000 12128 BN
To order by the largest values in column "population" and then "GDP",
we can specify multiple columns like in the next example.
>>> df.nlargest(3, ['population', 'GDP'])
population GDP alpha-2
France 65000000 2583560 FR
Italy 59000000 1937894 IT
Brunei 434000 12128 BN
"""
return algorithms.SelectNFrame(self, n=n, keep=keep, columns=columns).nlargest()
def nsmallest(self, n, columns, keep="first"):
"""
Return the first `n` rows ordered by `columns` in ascending order.
Return the first `n` rows with the smallest values in `columns`, in
ascending order. The columns that are not specified are returned as
well, but not used for ordering.
This method is equivalent to
``df.sort_values(columns, ascending=True).head(n)``, but more
performant.
Parameters
----------
n : int
Number of items to retrieve.
columns : list or str
Column name or names to order by.
keep : {'first', 'last', 'all'}, default 'first'
Where there are duplicate values:
- ``first`` : take the first occurrence.
- ``last`` : take the last occurrence.
- ``all`` : do not drop any duplicates, even if it means
selecting more than `n` items.
.. versionadded:: 0.24.0
Returns
-------
DataFrame
See Also
--------
DataFrame.nlargest : Return the first `n` rows ordered by `columns` in
descending order.
DataFrame.sort_values : Sort DataFrame by the values.
DataFrame.head : Return the first `n` rows without re-ordering.
Examples
--------
>>> df = pd.DataFrame({'population': [59000000, 65000000, 434000,
... 434000, 434000, 337000, 11300,
... 11300, 11300],
... 'GDP': [1937894, 2583560 , 12011, 4520, 12128,
... 17036, 182, 38, 311],
... 'alpha-2': ["IT", "FR", "MT", "MV", "BN",
... "IS", "NR", "TV", "AI"]},
... index=["Italy", "France", "Malta",
... "Maldives", "Brunei", "Iceland",
... "Nauru", "Tuvalu", "Anguilla"])
>>> df
population GDP alpha-2
Italy 59000000 1937894 IT
France 65000000 2583560 FR
Malta 434000 12011 MT
Maldives 434000 4520 MV
Brunei 434000 12128 BN
Iceland 337000 17036 IS
Nauru 11300 182 NR
Tuvalu 11300 38 TV
Anguilla 11300 311 AI
In the following example, we will use ``nsmallest`` to select the
three rows having the smallest values in column "population".
>>> df.nsmallest(3, 'population')
population GDP alpha-2
Nauru 11300 182 NR
Tuvalu 11300 38 TV
Anguilla 11300 311 AI
When using ``keep='last'``, ties are resolved in reverse order:
>>> df.nsmallest(3, 'population', keep='last')
population GDP alpha-2
Anguilla 11300 311 AI
Tuvalu 11300 38 TV
Nauru 11300 182 NR
When using ``keep='all'``, all duplicate items are maintained:
>>> df.nsmallest(3, 'population', keep='all')
population GDP alpha-2
Nauru 11300 182 NR
Tuvalu 11300 38 TV
Anguilla 11300 311 AI
To order by the smallest values in column "population" and then "GDP", we can
specify multiple columns like in the next example.
>>> df.nsmallest(3, ['population', 'GDP'])
population GDP alpha-2
Tuvalu 11300 38 TV
Nauru 11300 182 NR
Anguilla 11300 311 AI
"""
return algorithms.SelectNFrame(
self, n=n, keep=keep, columns=columns
).nsmallest()
def swaplevel(self, i=-2, j=-1, axis=0):
"""
Swap levels i and j in a MultiIndex on a particular axis.
Parameters
----------
i, j : int or str
Levels of the indices to be swapped. Can pass level name as string.
Returns
-------
DataFrame
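Examples
--------
A minimal sketch with hypothetical level names; only the order of the
index levels changes, not the data:
>>> idx = pd.MultiIndex.from_arrays([['a', 'c'], ['b', 'd']],
...                                 names=['outer', 'inner'])
>>> df = pd.DataFrame({'x': [1, 2]}, index=idx)
>>> list(df.swaplevel().index.names)
['inner', 'outer']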
"""
result = self.copy()
axis = self._get_axis_number(axis)
if axis == 0:
result.index = result.index.swaplevel(i, j)
else:
result.columns = result.columns.swaplevel(i, j)
return result
def reorder_levels(self, order, axis=0):
"""
Rearrange index levels using input order. May not drop or duplicate levels.
Parameters
----------
order : list of int or list of str
List representing new level order. Reference level by number
(position) or by key (label).
axis : int
Where to reorder levels.
Returns
-------
type of caller (new object)
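Examples
--------
A minimal sketch with hypothetical level names, referencing levels by
label:
>>> idx = pd.MultiIndex.from_arrays([['a', 'c'], ['b', 'd']],
...                                 names=['outer', 'inner'])
>>> df = pd.DataFrame({'x': [1, 2]}, index=idx)
>>> list(df.reorder_levels(['inner', 'outer']).index.names)
['inner', 'outer']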
"""
axis = self._get_axis_number(axis)
if not isinstance(self._get_axis(axis), ABCMultiIndex): # pragma: no cover
raise TypeError("Can only reorder levels on a hierarchical axis.")
result = self.copy()
if axis == 0:
result.index = result.index.reorder_levels(order)
else:
result.columns = result.columns.reorder_levels(order)
return result
# ----------------------------------------------------------------------
# Arithmetic / combination related
def _combine_frame(self, other, func, fill_value=None, level=None):
this, other = self.align(other, join="outer", level=level, copy=False)
if fill_value is None:
# since _arith_op may be called in a loop, avoid function call
# overhead if possible by doing this check once
_arith_op = func
else:
def _arith_op(left, right):
# for the mixed_type case where we iterate over columns,
# _arith_op(left, right) is equivalent to
# left._binop(right, func, fill_value=fill_value)
left, right = ops.fill_binop(left, right, fill_value)
return func(left, right)
if ops.should_series_dispatch(this, other, func):
# iterate over columns
new_data = ops.dispatch_to_series(this, other, _arith_op)
else:
with np.errstate(all="ignore"):
res_values = _arith_op(this.values, other.values)
new_data = dispatch_fill_zeros(func, this.values, other.values, res_values)
return this._construct_result(new_data)
def _combine_match_index(self, other, func):
# at this point we have `self.index.equals(other.index)`
if ops.should_series_dispatch(self, other, func):
# operate column-wise; avoid costly object-casting in `.values`
new_data = ops.dispatch_to_series(self, other, func)
else:
# fastpath --> operate directly on values
with np.errstate(all="ignore"):
new_data = func(self.values.T, other.values).T
return new_data
def _construct_result(self, result) -> "DataFrame":
"""
Wrap the result of an arithmetic, comparison, or logical operation.
Parameters
----------
result : DataFrame
Returns
-------
DataFrame
"""
out = self._constructor(result, index=self.index, copy=False)
# Pin columns instead of passing to constructor for compat with
# non-unique columns case
out.columns = self.columns
return out
def combine(self, other, func, fill_value=None, overwrite=True):
"""
Perform column-wise combine with another DataFrame.
Combines a DataFrame with `other` DataFrame using `func`
to element-wise combine columns. The row and column indexes of the
resulting DataFrame will be the union of the two.
Parameters
----------
other : DataFrame
The DataFrame to merge column-wise.
func : function
Function that takes two series as inputs and returns a Series or a
scalar. Used to merge the two dataframes column by column.
fill_value : scalar value, default None
The value to fill NaNs with prior to passing any column to the
merge func.
overwrite : bool, default True
If True, columns in `self` that do not exist in `other` will be
overwritten with NaNs.
Returns
-------
DataFrame
Combination of the provided DataFrames.
See Also
--------
DataFrame.combine_first : Combine two DataFrame objects and default to
non-null values in frame calling the method.
Examples
--------
Combine using a simple function that chooses the smaller column.
>>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]})
>>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
>>> take_smaller = lambda s1, s2: s1 if s1.sum() < s2.sum() else s2
>>> df1.combine(df2, take_smaller)
A B
0 0 3
1 0 3
Example using a true element-wise combine function.
>>> df1 = pd.DataFrame({'A': [5, 0], 'B': [2, 4]})
>>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
>>> df1.combine(df2, np.minimum)
A B
0 1 2
1 0 3
Using `fill_value` fills Nones prior to passing the column to the
merge function.
>>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]})
>>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
>>> df1.combine(df2, take_smaller, fill_value=-5)
A B
0 0 -5.0
1 0 4.0
However, if the same element in both dataframes is None, that None
is preserved:
>>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]})
>>> df2 = pd.DataFrame({'A': [1, 1], 'B': [None, 3]})
>>> df1.combine(df2, take_smaller, fill_value=-5)
A B
0 0 -5.0
1 0 3.0
Example that demonstrates the use of `overwrite` and behavior when
the axes differ between the dataframes.
>>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]})
>>> df2 = pd.DataFrame({'B': [3, 3], 'C': [-10, 1], }, index=[1, 2])
>>> df1.combine(df2, take_smaller)
A B C
0 NaN NaN NaN
1 NaN 3.0 -10.0
2 NaN 3.0 1.0
>>> df1.combine(df2, take_smaller, overwrite=False)
A B C
0 0.0 NaN NaN
1 0.0 3.0 -10.0
2 NaN 3.0 1.0
Demonstrating the preference of the passed-in dataframe.
>>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1], }, index=[1, 2])
>>> df2.combine(df1, take_smaller)
A B C
0 0.0 NaN NaN
1 0.0 3.0 NaN
2 NaN 3.0 NaN
>>> df2.combine(df1, take_smaller, overwrite=False)
A B C
0 0.0 NaN NaN
1 0.0 3.0 1.0
2 NaN 3.0 1.0
"""
other_idxlen = len(other.index) # save for compare
this, other = self.align(other, copy=False)
new_index = this.index
if other.empty and len(new_index) == len(self.index):
return self.copy()
if self.empty and len(other) == other_idxlen:
return other.copy()
# sorts if possible
new_columns = this.columns.union(other.columns)
do_fill = fill_value is not None
result = {}
for col in new_columns:
series = this[col]
otherSeries = other[col]
this_dtype = series.dtype
other_dtype = otherSeries.dtype
this_mask = isna(series)
other_mask = isna(otherSeries)
# don't overwrite columns unnecessarily
# DO propagate if this column is not in the intersection
if not overwrite and other_mask.all():
result[col] = this[col].copy()
continue
if do_fill:
series = series.copy()
otherSeries = otherSeries.copy()
series[this_mask] = fill_value
otherSeries[other_mask] = fill_value
if col not in self.columns:
# If col is only present in `other` (not in self), the aligned
# series is all NaN; try to cast it to other_dtype.
new_dtype = other_dtype
try:
series = series.astype(new_dtype, copy=False)
except ValueError:
# e.g. new_dtype is integer types
pass
else:
# if we have different dtypes, possibly promote
new_dtype = find_common_type([this_dtype, other_dtype])
if not is_dtype_equal(this_dtype, new_dtype):
series = series.astype(new_dtype)
if not is_dtype_equal(other_dtype, new_dtype):
otherSeries = otherSeries.astype(new_dtype)
arr = func(series, otherSeries)
arr = maybe_downcast_to_dtype(arr, this_dtype)
result[col] = arr
# convert_objects just in case
return self._constructor(result, index=new_index, columns=new_columns)
def combine_first(self, other):
"""
Update null elements with value in the same location in `other`.
Combine two DataFrame objects by filling null values in one DataFrame
with non-null values from other DataFrame. The row and column indexes
of the resulting DataFrame will be the union of the two.
Parameters
----------
other : DataFrame
Provided DataFrame to use to fill null values.
Returns
-------
DataFrame
See Also
--------
DataFrame.combine : Perform series-wise operation on two DataFrames
using a given function.
Examples
--------
>>> df1 = pd.DataFrame({'A': [None, 0], 'B': [None, 4]})
>>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
>>> df1.combine_first(df2)
A B
0 1.0 3.0
1 0.0 4.0
Null values still persist if the location of that null value
does not exist in `other`
>>> df1 = pd.DataFrame({'A': [None, 0], 'B': [4, None]})
>>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1]}, index=[1, 2])
>>> df1.combine_first(df2)
A B C
0 NaN 4.0 NaN
1 0.0 3.0 1.0
2 NaN 3.0 1.0
"""
import pandas.core.computation.expressions as expressions
def extract_values(arr):
# Does two things:
# 1. maybe gets the values from the Series / Index
# 2. convert datelike to i8
if isinstance(arr, (ABCIndexClass, ABCSeries)):
arr = arr._values
if needs_i8_conversion(arr):
if is_extension_array_dtype(arr.dtype):
arr = arr.asi8
else:
arr = arr.view("i8")
return arr
def combiner(x, y):
mask = isna(x)
if isinstance(mask, (ABCIndexClass, ABCSeries)):
mask = mask._values
x_values = extract_values(x)
y_values = extract_values(y)
# If the column y in other DataFrame is not in first DataFrame,
# just return y_values.
if y.name not in self.columns:
return y_values
return expressions.where(mask, y_values, x_values)
return self.combine(other, combiner, overwrite=False)
def update(
self, other, join="left", overwrite=True, filter_func=None, errors="ignore"
):
"""
Modify in place using non-NA values from another DataFrame.
Aligns on indices. There is no return value.
Parameters
----------
other : DataFrame, or object coercible into a DataFrame
Should have at least one matching index/column label
with the original DataFrame. If a Series is passed,
its name attribute must be set, and that will be
used as the column name to align with the original DataFrame.
join : {'left'}, default 'left'
Only left join is implemented, keeping the index and columns of the
original object.
overwrite : bool, default True
How to handle non-NA values for overlapping keys:
* True: overwrite original DataFrame's values
with values from `other`.
* False: only update values that are NA in
the original DataFrame.
filter_func : callable(1d-array) -> bool 1d-array, optional
Can choose to replace values other than NA. Return True for values
that should be updated.
errors : {'raise', 'ignore'}, default 'ignore'
If 'raise', will raise a ValueError if the DataFrame and `other`
both contain non-NA data in the same place.
.. versionchanged:: 0.24.0
Changed from `raise_conflict=False|True`
to `errors='ignore'|'raise'`.
Returns
-------
None : method directly changes calling object
Raises
------
ValueError
* When `errors='raise'` and there's overlapping non-NA data.
* When `errors` is not either `'ignore'` or `'raise'`
NotImplementedError
* If `join != 'left'`
See Also
--------
dict.update : Similar method for dictionaries.
DataFrame.merge : For column(s)-on-columns(s) operations.
Examples
--------
>>> df = pd.DataFrame({'A': [1, 2, 3],
... 'B': [400, 500, 600]})
>>> new_df = pd.DataFrame({'B': [4, 5, 6],
... 'C': [7, 8, 9]})
>>> df.update(new_df)
>>> df
A B
0 1 4
1 2 5
2 3 6
The DataFrame's length does not increase as a result of the update,
only values at matching index/column labels are updated.
>>> df = pd.DataFrame({'A': ['a', 'b', 'c'],
... 'B': ['x', 'y', 'z']})
>>> new_df = pd.DataFrame({'B': ['d', 'e', 'f', 'g', 'h', 'i']})
>>> df.update(new_df)
>>> df
A B
0 a d
1 b e
2 c f
For a Series, its name attribute must be set.
>>> df = pd.DataFrame({'A': ['a', 'b', 'c'],
... 'B': ['x', 'y', 'z']})
>>> new_column = pd.Series(['d', 'e'], name='B', index=[0, 2])
>>> df.update(new_column)
>>> df
A B
0 a d
1 b y
2 c e
>>> df = pd.DataFrame({'A': ['a', 'b', 'c'],
... 'B': ['x', 'y', 'z']})
>>> new_df = pd.DataFrame({'B': ['d', 'e']}, index=[1, 2])
>>> df.update(new_df)
>>> df
A B
0 a x
1 b d
2 c e
If `other` contains NaNs the corresponding values are not updated
in the original dataframe.
>>> df = pd.DataFrame({'A': [1, 2, 3],
... 'B': [400, 500, 600]})
>>> new_df = pd.DataFrame({'B': [4, np.nan, 6]})
>>> df.update(new_df)
>>> df
A B
0 1 4.0
1 2 500.0
2 3 6.0
"""
import pandas.core.computation.expressions as expressions
# TODO: Support other joins
if join != "left": # pragma: no cover
raise NotImplementedError("Only left join is supported")
if errors not in ["ignore", "raise"]:
raise ValueError("The parameter errors must be either 'ignore' or 'raise'")
if not isinstance(other, DataFrame):
other = DataFrame(other)
other = other.reindex_like(self)
for col in self.columns:
this = self[col]._values
that = other[col]._values
if filter_func is not None:
with np.errstate(all="ignore"):
mask = ~filter_func(this) | isna(that)
else:
if errors == "raise":
mask_this = notna(that)
mask_that = notna(this)
if any(mask_this & mask_that):
raise ValueError("Data overlaps.")
if overwrite:
mask = isna(that)
else:
mask = notna(this)
# don't overwrite columns unnecessarily
if mask.all():
continue
self[col] = expressions.where(mask, this, that)
# ----------------------------------------------------------------------
# Data reshaping
_shared_docs[
"pivot"
] = """
Return reshaped DataFrame organized by given index / column values.
Reshape data (produce a "pivot" table) based on column values. Uses
unique values from specified `index` / `columns` to form axes of the
resulting DataFrame. This function does not support data
aggregation; multiple values will result in a MultiIndex in the
columns. See the :ref:`User Guide <reshaping>` for more on reshaping.
Parameters
----------%s
index : str or object, optional
Column to use to make new frame's index. If None, uses
existing index.
columns : str or object
Column to use to make new frame's columns.
values : str, object or a list of the previous, optional
Column(s) to use for populating new frame's values. If not
specified, all remaining columns will be used and the result will
have hierarchically indexed columns.
.. versionchanged:: 0.23.0
Also accept list of column names.
Returns
-------
DataFrame
Returns reshaped DataFrame.
Raises
------
ValueError:
When there are any `index`, `columns` combinations with multiple
values. Use `DataFrame.pivot_table` when you need to aggregate.
See Also
--------
DataFrame.pivot_table : Generalization of pivot that can handle
duplicate values for one index/column pair.
DataFrame.unstack : Pivot based on the index values instead of a
column.
Notes
-----
For finer-tuned control, see hierarchical indexing documentation along
with the related stack/unstack methods.
Examples
--------
>>> df = pd.DataFrame({'foo': ['one', 'one', 'one', 'two', 'two',
... 'two'],
... 'bar': ['A', 'B', 'C', 'A', 'B', 'C'],
... 'baz': [1, 2, 3, 4, 5, 6],
... 'zoo': ['x', 'y', 'z', 'q', 'w', 't']})
>>> df
foo bar baz zoo
0 one A 1 x
1 one B 2 y
2 one C 3 z
3 two A 4 q
4 two B 5 w
5 two C 6 t
>>> df.pivot(index='foo', columns='bar', values='baz')
bar A B C
foo
one 1 2 3
two 4 5 6
>>> df.pivot(index='foo', columns='bar')['baz']
bar A B C
foo
one 1 2 3
two 4 5 6
>>> df.pivot(index='foo', columns='bar', values=['baz', 'zoo'])
baz zoo
bar A B C A B C
foo
one 1 2 3 x y z
two 4 5 6 q w t
A ValueError is raised if there are any duplicates.
>>> df = pd.DataFrame({"foo": ['one', 'one', 'two', 'two'],
... "bar": ['A', 'A', 'B', 'C'],
... "baz": [1, 2, 3, 4]})
>>> df
foo bar baz
0 one A 1
1 one A 2
2 two B 3
3 two C 4
Notice that the first two rows are the same for our `index`
and `columns` arguments.
>>> df.pivot(index='foo', columns='bar', values='baz')
Traceback (most recent call last):
...
ValueError: Index contains duplicate entries, cannot reshape
"""
@Substitution("")
@Appender(_shared_docs["pivot"])
def pivot(self, index=None, columns=None, values=None):
from pandas.core.reshape.pivot import pivot
return pivot(self, index=index, columns=columns, values=values)
_shared_docs[
"pivot_table"
] = """
Create a spreadsheet-style pivot table as a DataFrame.
The levels in the pivot table will be stored in MultiIndex objects
(hierarchical indexes) on the index and columns of the result DataFrame.
Parameters
----------%s
values : column to aggregate, optional
index : column, Grouper, array, or list of the previous
If an array is passed, it must be the same length as the data. The
list can contain any of the other types (except list).
Keys to group by on the pivot table index. If an array is passed,
it is used in the same manner as column values.
columns : column, Grouper, array, or list of the previous
If an array is passed, it must be the same length as the data. The
list can contain any of the other types (except list).
Keys to group by on the pivot table column. If an array is passed,
it is used in the same manner as column values.
aggfunc : function, list of functions, dict, default numpy.mean
If list of functions passed, the resulting pivot table will have
hierarchical columns whose top level are the function names
(inferred from the function objects themselves).
If dict is passed, the key is column to aggregate and value
is function or list of functions.
fill_value : scalar, default None
Value to replace missing values with.
margins : bool, default False
Add all row / columns (e.g. for subtotal / grand totals).
dropna : bool, default True
Do not include columns whose entries are all NaN.
margins_name : str, default 'All'
Name of the row / column that will contain the totals
when margins is True.
observed : bool, default False
This only applies if any of the groupers are Categoricals.
If True: only show observed values for categorical groupers.
If False: show all values for categorical groupers.
.. versionchanged:: 0.25.0
Returns
-------
DataFrame
An Excel style pivot table.
See Also
--------
DataFrame.pivot : Pivot without aggregation that can handle
non-numeric data.
Examples
--------
>>> df = pd.DataFrame({"A": ["foo", "foo", "foo", "foo", "foo",
... "bar", "bar", "bar", "bar"],
... "B": ["one", "one", "one", "two", "two",
... "one", "one", "two", "two"],
... "C": ["small", "large", "large", "small",
... "small", "large", "small", "small",
... "large"],
... "D": [1, 2, 2, 3, 3, 4, 5, 6, 7],
... "E": [2, 4, 5, 5, 6, 6, 8, 9, 9]})
>>> df
A B C D E
0 foo one small 1 2
1 foo one large 2 4
2 foo one large 2 5
3 foo two small 3 5
4 foo two small 3 6
5 bar one large 4 6
6 bar one small 5 8
7 bar two small 6 9
8 bar two large 7 9
This first example aggregates values by taking the sum.
>>> table = pd.pivot_table(df, values='D', index=['A', 'B'],
... columns=['C'], aggfunc=np.sum)
>>> table
C large small
A B
bar one 4.0 5.0
two 7.0 6.0
foo one 4.0 1.0
two NaN 6.0
We can also fill missing values using the `fill_value` parameter.
>>> table = pd.pivot_table(df, values='D', index=['A', 'B'],
... columns=['C'], aggfunc=np.sum, fill_value=0)
>>> table
C large small
A B
bar one 4 5
two 7 6
foo one 4 1
two 0 6
The next example aggregates by taking the mean across multiple columns.
>>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'],
... aggfunc={'D': np.mean,
... 'E': np.mean})
>>> table
D E
A C
bar large 5.500000 7.500000
small 5.500000 8.500000
foo large 2.000000 4.500000
small 2.333333 4.333333
We can also calculate multiple types of aggregations for any given
value column.
>>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'],
... aggfunc={'D': np.mean,
... 'E': [min, max, np.mean]})
>>> table
D E
mean max mean min
A C
bar large 5.500000 9.0 7.500000 6.0
small 5.500000 9.0 8.500000 8.0
foo large 2.000000 5.0 4.500000 4.0
small 2.333333 6.0 4.333333 2.0
"""
@Substitution("")
@Appender(_shared_docs["pivot_table"])
def pivot_table(
self,
values=None,
index=None,
columns=None,
aggfunc="mean",
fill_value=None,
margins=False,
dropna=True,
margins_name="All",
observed=False,
):
from pandas.core.reshape.pivot import pivot_table
return pivot_table(
self,
values=values,
index=index,
columns=columns,
aggfunc=aggfunc,
fill_value=fill_value,
margins=margins,
dropna=dropna,
margins_name=margins_name,
observed=observed,
)
def stack(self, level=-1, dropna=True):
"""
Stack the prescribed level(s) from columns to index.
Return a reshaped DataFrame or Series having a multi-level
index with one or more new inner-most levels compared to the current
DataFrame. The new inner-most levels are created by pivoting the
columns of the current dataframe:
- if the columns have a single level, the output is a Series;
- if the columns have multiple levels, the new index
level(s) is (are) taken from the prescribed level(s) and
the output is a DataFrame.
The new index levels are sorted.
Parameters
----------
level : int, str, list, default -1
Level(s) to stack from the column axis onto the index
axis, defined as one index or label, or a list of indices
or labels.
dropna : bool, default True
Whether to drop rows in the resulting Frame/Series with
missing values. Stacking a column level onto the index
axis can create combinations of index and column values
that are missing from the original dataframe. See Examples
section.
Returns
-------
DataFrame or Series
Stacked dataframe or series.
See Also
--------
DataFrame.unstack : Unstack prescribed level(s) from index axis
onto column axis.
DataFrame.pivot : Reshape dataframe from long format to wide
format.
DataFrame.pivot_table : Create a spreadsheet-style pivot table
as a DataFrame.
Notes
-----
The function is named by analogy with a collection of books
being reorganized from being side by side on a horizontal
position (the columns of the dataframe) to being stacked
vertically on top of each other (in the index of the
dataframe).
Examples
--------
**Single level columns**
>>> df_single_level_cols = pd.DataFrame([[0, 1], [2, 3]],
... index=['cat', 'dog'],
... columns=['weight', 'height'])
Stacking a dataframe with a single level column axis returns a Series:
>>> df_single_level_cols
weight height
cat 0 1
dog 2 3
>>> df_single_level_cols.stack()
cat weight 0
height 1
dog weight 2
height 3
dtype: int64
**Multi level columns: simple case**
>>> multicol1 = pd.MultiIndex.from_tuples([('weight', 'kg'),
... ('weight', 'pounds')])
>>> df_multi_level_cols1 = pd.DataFrame([[1, 2], [2, 4]],
... index=['cat', 'dog'],
... columns=multicol1)
Stacking a dataframe with a multi-level column axis:
>>> df_multi_level_cols1
weight
kg pounds
cat 1 2
dog 2 4
>>> df_multi_level_cols1.stack()
weight
cat kg 1
pounds 2
dog kg 2
pounds 4
**Missing values**
>>> multicol2 = pd.MultiIndex.from_tuples([('weight', 'kg'),
... ('height', 'm')])
>>> df_multi_level_cols2 = pd.DataFrame([[1.0, 2.0], [3.0, 4.0]],
... index=['cat', 'dog'],
... columns=multicol2)
It is common to have missing values when stacking a dataframe
with multi-level columns, as the stacked dataframe typically
has more values than the original dataframe. Missing values
are filled with NaNs:
>>> df_multi_level_cols2
weight height
kg m
cat 1.0 2.0
dog 3.0 4.0
>>> df_multi_level_cols2.stack()
height weight
cat kg NaN 1.0
m 2.0 NaN
dog kg NaN 3.0
m 4.0 NaN
**Prescribing the level(s) to be stacked**
The first parameter controls which level or levels are stacked:
>>> df_multi_level_cols2.stack(0)
kg m
cat height NaN 2.0
weight 1.0 NaN
dog height NaN 4.0
weight 3.0 NaN
>>> df_multi_level_cols2.stack([0, 1])
cat height m 2.0
weight kg 1.0
dog height m 4.0
weight kg 3.0
dtype: float64
**Dropping missing values**
>>> df_multi_level_cols3 = pd.DataFrame([[None, 1.0], [2.0, 3.0]],
... index=['cat', 'dog'],
... columns=multicol2)
Note that rows where all values are missing are dropped by
default but this behaviour can be controlled via the dropna
keyword parameter:
>>> df_multi_level_cols3
weight height
kg m
cat NaN 1.0
dog 2.0 3.0
>>> df_multi_level_cols3.stack(dropna=False)
height weight
cat kg NaN NaN
m 1.0 NaN
dog kg NaN 2.0
m 3.0 NaN
>>> df_multi_level_cols3.stack(dropna=True)
height weight
cat m 1.0 NaN
dog kg NaN 2.0
m 3.0 NaN
"""
from pandas.core.reshape.reshape import stack, stack_multiple
if isinstance(level, (tuple, list)):
return stack_multiple(self, level, dropna=dropna)
else:
return stack(self, level, dropna=dropna)
def explode(self, column: Union[str, Tuple]) -> "DataFrame":
"""
Transform each element of a list-like to a row, replicating index values.
.. versionadded:: 0.25.0
Parameters
----------
column : str or tuple
Column to explode.
Returns
-------
DataFrame
Exploded lists to rows of the subset columns;
index will be duplicated for these rows.
Raises
------
ValueError :
if columns of the frame are not unique.
See Also
--------
DataFrame.unstack : Pivot a level of the (necessarily hierarchical)
index labels.
DataFrame.melt : Unpivot a DataFrame from wide format to long format.
Series.explode : Explode a Series from list-like entries to long format.
Notes
-----
This routine will explode list-likes including lists, tuples,
Series, and np.ndarray. The result dtype of the subset rows will
be object. Scalars will be returned unchanged. Empty list-likes will
result in a np.nan for that row.
Examples
--------
>>> df = pd.DataFrame({'A': [[1, 2, 3], 'foo', [], [3, 4]], 'B': 1})
>>> df
A B
0 [1, 2, 3] 1
1 foo 1
2 [] 1
3 [3, 4] 1
>>> df.explode('A')
A B
0 1 1
0 2 1
0 3 1
1 foo 1
2 NaN 1
3 3 1
3 4 1
"""
if not (is_scalar(column) or isinstance(column, tuple)):
raise ValueError("column must be a scalar")
if not self.columns.is_unique:
raise ValueError("columns must be unique")
df = self.reset_index(drop=True)
result = df[column].explode()
result = df.drop([column], axis=1).join(result)
result.index = self.index.take(result.index)
result = result.reindex(columns=self.columns, copy=False)
return result
def unstack(self, level=-1, fill_value=None):
"""
Pivot a level of the (necessarily hierarchical) index labels.
Returns a DataFrame having a new level of column labels whose inner-most level
consists of the pivoted index labels.
If the index is not a MultiIndex, the output will be a Series
(the analogue of stack when the columns are not a MultiIndex).
The level involved will automatically get sorted.
Parameters
----------
level : int, str, or list of these, default -1 (last level)
Level(s) of index to unstack, can pass level name.
fill_value : int, str or dict
Replace NaN with this value if the unstack produces missing values.
Returns
-------
Series or DataFrame
See Also
--------
DataFrame.pivot : Pivot a table based on column values.
DataFrame.stack : Pivot a level of the column labels (inverse operation
from `unstack`).
Examples
--------
>>> index = pd.MultiIndex.from_tuples([('one', 'a'), ('one', 'b'),
... ('two', 'a'), ('two', 'b')])
>>> s = pd.Series(np.arange(1.0, 5.0), index=index)
>>> s
one a 1.0
b 2.0
two a 3.0
b 4.0
dtype: float64
>>> s.unstack(level=-1)
a b
one 1.0 2.0
two 3.0 4.0
>>> s.unstack(level=0)
one two
a 1.0 3.0
b 2.0 4.0
>>> df = s.unstack(level=0)
>>> df.unstack()
one a 1.0
b 2.0
two a 3.0
b 4.0
dtype: float64
"""
from pandas.core.reshape.reshape import unstack
return unstack(self, level, fill_value)
_shared_docs[
"melt"
] = """
Unpivot a DataFrame from wide to long format, optionally leaving identifiers set.
This function is useful to massage a DataFrame into a format where one
or more columns are identifier variables (`id_vars`), while all other
columns, considered measured variables (`value_vars`), are "unpivoted" to
the row axis, leaving just two non-identifier columns, 'variable' and
'value'.
%(versionadded)s
Parameters
----------
id_vars : tuple, list, or ndarray, optional
Column(s) to use as identifier variables.
value_vars : tuple, list, or ndarray, optional
Column(s) to unpivot. If not specified, uses all columns that
are not set as `id_vars`.
var_name : scalar
Name to use for the 'variable' column. If None it uses
``frame.columns.name`` or 'variable'.
value_name : scalar, default 'value'
Name to use for the 'value' column.
col_level : int or str, optional
If columns are a MultiIndex then use this level to melt.
Returns
-------
DataFrame
Unpivoted DataFrame.
See Also
--------
%(other)s
pivot_table
DataFrame.pivot
Series.explode
Examples
--------
>>> df = pd.DataFrame({'A': {0: 'a', 1: 'b', 2: 'c'},
... 'B': {0: 1, 1: 3, 2: 5},
... 'C': {0: 2, 1: 4, 2: 6}})
>>> df
A B C
0 a 1 2
1 b 3 4
2 c 5 6
>>> %(caller)sid_vars=['A'], value_vars=['B'])
A variable value
0 a B 1
1 b B 3
2 c B 5
>>> %(caller)sid_vars=['A'], value_vars=['B', 'C'])
A variable value
0 a B 1
1 b B 3
2 c B 5
3 a C 2
4 b C 4
5 c C 6
The names of 'variable' and 'value' columns can be customized:
>>> %(caller)sid_vars=['A'], value_vars=['B'],
... var_name='myVarname', value_name='myValname')
A myVarname myValname
0 a B 1
1 b B 3
2 c B 5
If you have multi-index columns:
>>> df.columns = [list('ABC'), list('DEF')]
>>> df
A B C
D E F
0 a 1 2
1 b 3 4
2 c 5 6
>>> %(caller)scol_level=0, id_vars=['A'], value_vars=['B'])
A variable value
0 a B 1
1 b B 3
2 c B 5
>>> %(caller)sid_vars=[('A', 'D')], value_vars=[('B', 'E')])
(A, D) variable_0 variable_1 value
0 a B E 1
1 b B E 3
2 c B E 5
"""
@Appender(
_shared_docs["melt"]
% dict(
caller="df.melt(", versionadded=".. versionadded:: 0.20.0\n", other="melt"
)
)
def melt(
self,
id_vars=None,
value_vars=None,
var_name=None,
value_name="value",
col_level=None,
):
from pandas.core.reshape.melt import melt
return melt(
self,
id_vars=id_vars,
value_vars=value_vars,
var_name=var_name,
value_name=value_name,
col_level=col_level,
)
# ----------------------------------------------------------------------
# Time series-related
def diff(self, periods=1, axis=0):
"""
First discrete difference of element.
Calculates the difference of a DataFrame element compared with another
element in the DataFrame (default is the element in the same column
of the previous row).
Parameters
----------
periods : int, default 1
Periods to shift for calculating difference, accepts negative
values.
axis : {0 or 'index', 1 or 'columns'}, default 0
Take difference over rows (0) or columns (1).
Returns
-------
DataFrame
See Also
--------
Series.diff: First discrete difference for a Series.
DataFrame.pct_change: Percent change over given number of periods.
DataFrame.shift: Shift index by desired number of periods with an
optional time freq.
Examples
--------
Difference with previous row
>>> df = pd.DataFrame({'a': [1, 2, 3, 4, 5, 6],
... 'b': [1, 1, 2, 3, 5, 8],
... 'c': [1, 4, 9, 16, 25, 36]})
>>> df
a b c
0 1 1 1
1 2 1 4
2 3 2 9
3 4 3 16
4 5 5 25
5 6 8 36
>>> df.diff()
a b c
0 NaN NaN NaN
1 1.0 0.0 3.0
2 1.0 1.0 5.0
3 1.0 1.0 7.0
4 1.0 2.0 9.0
5 1.0 3.0 11.0
Difference with previous column
>>> df.diff(axis=1)
a b c
0 NaN 0.0 0.0
1 NaN -1.0 3.0
2 NaN -1.0 7.0
3 NaN -1.0 13.0
4 NaN 0.0 20.0
5 NaN 2.0 28.0
Difference with 3rd previous row
>>> df.diff(periods=3)
a b c
0 NaN NaN NaN
1 NaN NaN NaN
2 NaN NaN NaN
3 3.0 2.0 15.0
4 3.0 4.0 21.0
5 3.0 6.0 27.0
Difference with following row
>>> df.diff(periods=-1)
a b c
0 -1.0 0.0 -3.0
1 -1.0 -1.0 -5.0
2 -1.0 -1.0 -7.0
3 -1.0 -2.0 -9.0
4 -1.0 -3.0 -11.0
5 NaN NaN NaN
"""
bm_axis = self._get_block_manager_axis(axis)
new_data = self._data.diff(n=periods, axis=bm_axis)
return self._constructor(new_data)
# ----------------------------------------------------------------------
# Function application
def _gotitem(
self,
key: Union[str, List[str]],
ndim: int,
subset: Optional[Union[Series, ABCDataFrame]] = None,
) -> Union[Series, ABCDataFrame]:
"""
Sub-classes to define. Return a sliced object.
Parameters
----------
key : string / list of selections
ndim : 1,2
requested ndim of result
subset : object, default None
subset to act on
"""
if subset is None:
subset = self
elif subset.ndim == 1: # is Series
return subset
# TODO: _shallow_copy(subset)?
return subset[key]
_agg_summary_and_see_also_doc = dedent(
"""
The aggregation operations are always performed over an axis, either the
index (default) or the column axis. This behavior is different from
`numpy` aggregation functions (`mean`, `median`, `prod`, `sum`, `std`,
`var`), where the default is to compute the aggregation of the flattened
array, e.g., ``numpy.mean(arr_2d)`` as opposed to
``numpy.mean(arr_2d, axis=0)``.
`agg` is an alias for `aggregate`. Use the alias.
See Also
--------
DataFrame.apply : Perform any type of operations.
DataFrame.transform : Perform transformation type operations.
core.groupby.GroupBy : Perform operations over groups.
core.resample.Resampler : Perform operations over resampled bins.
core.window.Rolling : Perform operations over rolling window.
core.window.Expanding : Perform operations over expanding window.
core.window.EWM : Perform operation over exponential weighted
window.
"""
)
_agg_examples_doc = dedent(
"""
Examples
--------
>>> df = pd.DataFrame([[1, 2, 3],
... [4, 5, 6],
... [7, 8, 9],
... [np.nan, np.nan, np.nan]],
... columns=['A', 'B', 'C'])
Aggregate these functions over the rows.
>>> df.agg(['sum', 'min'])
A B C
sum 12.0 15.0 18.0
min 1.0 2.0 3.0
Different aggregations per column.
>>> df.agg({'A' : ['sum', 'min'], 'B' : ['min', 'max']})
A B
max NaN 8.0
min 1.0 2.0
sum 12.0 NaN
Aggregate over the columns.
>>> df.agg("mean", axis="columns")
0 2.0
1 5.0
2 8.0
3 NaN
dtype: float64
"""
)
@Substitution(
see_also=_agg_summary_and_see_also_doc,
examples=_agg_examples_doc,
versionadded="\n.. versionadded:: 0.20.0\n",
**_shared_doc_kwargs,
)
@Appender(_shared_docs["aggregate"])
def aggregate(self, func, axis=0, *args, **kwargs):
axis = self._get_axis_number(axis)
result = None
try:
result, how = self._aggregate(func, axis=axis, *args, **kwargs)
except TypeError:
pass
if result is None:
return self.apply(func, axis=axis, args=args, **kwargs)
return result
def _aggregate(self, arg, axis=0, *args, **kwargs):
if axis == 1:
# NDFrame.aggregate returns a tuple, and we need to transpose
# only result
result, how = self.T._aggregate(arg, *args, **kwargs)
result = result.T if result is not None else result
return result, how
return super()._aggregate(arg, *args, **kwargs)
agg = aggregate
@Appender(_shared_docs["transform"] % _shared_doc_kwargs)
def transform(self, func, axis=0, *args, **kwargs):
axis = self._get_axis_number(axis)
if axis == 1:
return self.T.transform(func, *args, **kwargs).T
return super().transform(func, *args, **kwargs)
def apply(self, func, axis=0, raw=False, result_type=None, args=(), **kwds):
"""
Apply a function along an axis of the DataFrame.
Objects passed to the function are Series objects whose index is
either the DataFrame's index (``axis=0``) or the DataFrame's columns
(``axis=1``). By default (``result_type=None``), the final return type
is inferred from the return type of the applied function. Otherwise,
it depends on the `result_type` argument.
Parameters
----------
func : function
Function to apply to each column or row.
axis : {0 or 'index', 1 or 'columns'}, default 0
Axis along which the function is applied:
* 0 or 'index': apply function to each column.
* 1 or 'columns': apply function to each row.
raw : bool, default False
Determines if row or column is passed as a Series or ndarray object:
* ``False`` : passes each row or column as a Series to the
function.
* ``True`` : the passed function will receive ndarray objects
instead.
If you are just applying a NumPy reduction function this will
achieve much better performance.
result_type : {'expand', 'reduce', 'broadcast', None}, default None
These only act when ``axis=1`` (columns):
* 'expand' : list-like results will be turned into columns.
* 'reduce' : returns a Series if possible rather than expanding
list-like results. This is the opposite of 'expand'.
* 'broadcast' : results will be broadcast to the original shape
of the DataFrame, the original index and columns will be
retained.
The default behaviour (None) depends on the return value of the
applied function: list-like results will be returned as a Series
of those. However if the apply function returns a Series these
are expanded to columns.
.. versionadded:: 0.23.0
args : tuple
Positional arguments to pass to `func` in addition to the
array/series.
**kwds
Additional keyword arguments to pass as keywords arguments to
`func`.
Returns
-------
Series or DataFrame
Result of applying ``func`` along the given axis of the
DataFrame.
See Also
--------
DataFrame.applymap: For elementwise operations.
DataFrame.aggregate: Only perform aggregating type operations.
DataFrame.transform: Only perform transforming type operations.
Examples
--------
>>> df = pd.DataFrame([[4, 9]] * 3, columns=['A', 'B'])
>>> df
A B
0 4 9
1 4 9
2 4 9
Using a numpy universal function (in this case the same as
``np.sqrt(df)``):
>>> df.apply(np.sqrt)
A B
0 2.0 3.0
1 2.0 3.0
2 2.0 3.0
Using a reducing function on either axis
>>> df.apply(np.sum, axis=0)
A 12
B 27
dtype: int64
>>> df.apply(np.sum, axis=1)
0 13
1 13
2 13
dtype: int64
Returning a list-like will result in a Series
>>> df.apply(lambda x: [1, 2], axis=1)
0 [1, 2]
1 [1, 2]
2 [1, 2]
dtype: object
        Passing ``result_type='expand'`` will expand list-like results
        to columns of a DataFrame.
>>> df.apply(lambda x: [1, 2], axis=1, result_type='expand')
0 1
0 1 2
1 1 2
2 1 2
Returning a Series inside the function is similar to passing
``result_type='expand'``. The resulting column names
will be the Series index.
>>> df.apply(lambda x: pd.Series([1, 2], index=['foo', 'bar']), axis=1)
foo bar
0 1 2
1 1 2
2 1 2
Passing ``result_type='broadcast'`` will ensure the same shape
result, whether list-like or scalar is returned by the function,
and broadcast it along the axis. The resulting column names will
be the originals.
>>> df.apply(lambda x: [1, 2], axis=1, result_type='broadcast')
A B
0 1 2
1 1 2
2 1 2
"""
from pandas.core.apply import frame_apply
op = frame_apply(
self,
func=func,
axis=axis,
raw=raw,
result_type=result_type,
args=args,
kwds=kwds,
)
return op.get_result()
def applymap(self, func):
"""
        Apply a function to a DataFrame elementwise.
This method applies a function that accepts and returns a scalar
to every element of a DataFrame.
Parameters
----------
func : callable
Python function, returns a single value from a single value.
Returns
-------
DataFrame
Transformed DataFrame.
See Also
--------
DataFrame.apply : Apply a function along input axis of DataFrame.
Notes
-----
In the current implementation applymap calls `func` twice on the
first column/row to decide whether it can take a fast or slow
code path. This can lead to unexpected behavior if `func` has
side-effects, as they will take effect twice for the first
column/row.
Examples
--------
>>> df = pd.DataFrame([[1, 2.12], [3.356, 4.567]])
>>> df
0 1
0 1.000 2.120
1 3.356 4.567
>>> df.applymap(lambda x: len(str(x)))
0 1
0 3 4
1 5 5
Note that a vectorized version of `func` often exists, which will
be much faster. You could square each number elementwise.
>>> df.applymap(lambda x: x**2)
0 1
0 1.000000 4.494400
1 11.262736 20.857489
But it's better to avoid applymap in that case.
>>> df ** 2
0 1
0 1.000000 4.494400
1 11.262736 20.857489
"""
# if we have a dtype == 'M8[ns]', provide boxed values
def infer(x):
if x.empty:
return lib.map_infer(x, func)
return lib.map_infer(x.astype(object).values, func)
return self.apply(infer)
# ----------------------------------------------------------------------
# Merging / joining methods
def append(self, other, ignore_index=False, verify_integrity=False, sort=None):
"""
Append rows of `other` to the end of caller, returning a new object.
Columns in `other` that are not in the caller are added as new columns.
Parameters
----------
other : DataFrame or Series/dict-like object, or list of these
The data to append.
ignore_index : bool, default False
If True, do not use the index labels.
verify_integrity : bool, default False
If True, raise ValueError on creating index with duplicates.
sort : bool, default None
Sort columns if the columns of `self` and `other` are not aligned.
The default sorting is deprecated and will change to not-sorting
in a future version of pandas. Explicitly pass ``sort=True`` to
silence the warning and sort. Explicitly pass ``sort=False`` to
silence the warning and not sort.
.. versionadded:: 0.23.0
Returns
-------
DataFrame
See Also
--------
concat : General function to concatenate DataFrame or Series objects.
Notes
-----
If a list of dict/series is passed and the keys are all contained in
the DataFrame's index, the order of the columns in the resulting
DataFrame will be unchanged.
Iteratively appending rows to a DataFrame can be more computationally
intensive than a single concatenate. A better solution is to append
those rows to a list and then concatenate the list with the original
DataFrame all at once.
Examples
--------
>>> df = pd.DataFrame([[1, 2], [3, 4]], columns=list('AB'))
>>> df
A B
0 1 2
1 3 4
>>> df2 = pd.DataFrame([[5, 6], [7, 8]], columns=list('AB'))
>>> df.append(df2)
A B
0 1 2
1 3 4
0 5 6
1 7 8
With `ignore_index` set to True:
>>> df.append(df2, ignore_index=True)
A B
0 1 2
1 3 4
2 5 6
3 7 8
        The following examples, while not recommended ways of building a
        DataFrame, show two approaches to generating one from multiple sources.
Less efficient:
>>> df = pd.DataFrame(columns=['A'])
>>> for i in range(5):
... df = df.append({'A': i}, ignore_index=True)
>>> df
A
0 0
1 1
2 2
3 3
4 4
More efficient:
>>> pd.concat([pd.DataFrame([i], columns=['A']) for i in range(5)],
... ignore_index=True)
A
0 0
1 1
2 2
3 3
4 4
"""
if isinstance(other, (Series, dict)):
if isinstance(other, dict):
other = Series(other)
if other.name is None and not ignore_index:
raise TypeError(
"Can only append a Series if ignore_index=True"
" or if the Series has a name"
)
if other.name is None:
index = None
else:
# other must have the same index name as self, otherwise
# index name will be reset
index = Index([other.name], name=self.index.name)
idx_diff = other.index.difference(self.columns)
try:
combined_columns = self.columns.append(idx_diff)
except TypeError:
combined_columns = self.columns.astype(object).append(idx_diff)
other = other.reindex(combined_columns, copy=False)
other = DataFrame(
other.values.reshape((1, len(other))),
index=index,
columns=combined_columns,
)
other = other._convert(datetime=True, timedelta=True)
if not self.columns.equals(combined_columns):
self = self.reindex(columns=combined_columns)
elif isinstance(other, list):
if not other:
pass
elif not isinstance(other[0], DataFrame):
other = DataFrame(other)
if (self.columns.get_indexer(other.columns) >= 0).all():
other = other.reindex(columns=self.columns)
from pandas.core.reshape.concat import concat
if isinstance(other, (list, tuple)):
to_concat = [self] + other
else:
to_concat = [self, other]
return concat(
to_concat,
ignore_index=ignore_index,
verify_integrity=verify_integrity,
sort=sort,
)
def join(self, other, on=None, how="left", lsuffix="", rsuffix="", sort=False):
"""
Join columns of another DataFrame.
Join columns with `other` DataFrame either on index or on a key
column. Efficiently join multiple DataFrame objects by index at once by
passing a list.
Parameters
----------
other : DataFrame, Series, or list of DataFrame
Index should be similar to one of the columns in this one. If a
Series is passed, its name attribute must be set, and that will be
used as the column name in the resulting joined DataFrame.
on : str, list of str, or array-like, optional
Column or index level name(s) in the caller to join on the index
in `other`, otherwise joins index-on-index. If multiple
values given, the `other` DataFrame must have a MultiIndex. Can
pass an array as the join key if it is not already contained in
the calling DataFrame. Like an Excel VLOOKUP operation.
how : {'left', 'right', 'outer', 'inner'}, default 'left'
How to handle the operation of the two objects.
* left: use calling frame's index (or column if on is specified)
* right: use `other`'s index.
* outer: form union of calling frame's index (or column if on is
              specified) with `other`'s index, and sort it
              lexicographically.
* inner: form intersection of calling frame's index (or column if
on is specified) with `other`'s index, preserving the order
of the calling's one.
lsuffix : str, default ''
Suffix to use from left frame's overlapping columns.
rsuffix : str, default ''
Suffix to use from right frame's overlapping columns.
sort : bool, default False
Order result DataFrame lexicographically by the join key. If False,
the order of the join key depends on the join type (how keyword).
Returns
-------
DataFrame
A dataframe containing columns from both the caller and `other`.
See Also
--------
DataFrame.merge : For column(s)-on-columns(s) operations.
Notes
-----
Parameters `on`, `lsuffix`, and `rsuffix` are not supported when
passing a list of `DataFrame` objects.
Support for specifying index levels as the `on` parameter was added
in version 0.23.0.
Examples
--------
>>> df = pd.DataFrame({'key': ['K0', 'K1', 'K2', 'K3', 'K4', 'K5'],
... 'A': ['A0', 'A1', 'A2', 'A3', 'A4', 'A5']})
>>> df
key A
0 K0 A0
1 K1 A1
2 K2 A2
3 K3 A3
4 K4 A4
5 K5 A5
>>> other = pd.DataFrame({'key': ['K0', 'K1', 'K2'],
... 'B': ['B0', 'B1', 'B2']})
>>> other
key B
0 K0 B0
1 K1 B1
2 K2 B2
Join DataFrames using their indexes.
>>> df.join(other, lsuffix='_caller', rsuffix='_other')
key_caller A key_other B
0 K0 A0 K0 B0
1 K1 A1 K1 B1
2 K2 A2 K2 B2
3 K3 A3 NaN NaN
4 K4 A4 NaN NaN
5 K5 A5 NaN NaN
If we want to join using the key columns, we need to set key to be
the index in both `df` and `other`. The joined DataFrame will have
key as its index.
>>> df.set_index('key').join(other.set_index('key'))
A B
key
K0 A0 B0
K1 A1 B1
K2 A2 B2
K3 A3 NaN
K4 A4 NaN
K5 A5 NaN
Another option to join using the key columns is to use the `on`
parameter. DataFrame.join always uses `other`'s index but we can use
any column in `df`. This method preserves the original DataFrame's
index in the result.
>>> df.join(other.set_index('key'), on='key')
key A B
0 K0 A0 B0
1 K1 A1 B1
2 K2 A2 B2
3 K3 A3 NaN
4 K4 A4 NaN
5 K5 A5 NaN
"""
return self._join_compat(
other, on=on, how=how, lsuffix=lsuffix, rsuffix=rsuffix, sort=sort
)
def _join_compat(
self, other, on=None, how="left", lsuffix="", rsuffix="", sort=False
):
from pandas.core.reshape.merge import merge
from pandas.core.reshape.concat import concat
if isinstance(other, Series):
if other.name is None:
raise ValueError("Other Series must have a name")
other = DataFrame({other.name: other})
if isinstance(other, DataFrame):
return merge(
self,
other,
left_on=on,
how=how,
left_index=on is None,
right_index=True,
suffixes=(lsuffix, rsuffix),
sort=sort,
)
else:
if on is not None:
raise ValueError(
"Joining multiple DataFrames only supported for joining on index"
)
frames = [self] + list(other)
can_concat = all(df.index.is_unique for df in frames)
# join indexes only using concat
if can_concat:
if how == "left":
res = concat(
frames, axis=1, join="outer", verify_integrity=True, sort=sort
)
return res.reindex(self.index, copy=False)
else:
return concat(
frames, axis=1, join=how, verify_integrity=True, sort=sort
)
joined = frames[0]
for frame in frames[1:]:
joined = merge(
joined, frame, how=how, left_index=True, right_index=True
)
return joined
@Substitution("")
@Appender(_merge_doc, indents=2)
def merge(
self,
right,
how="inner",
on=None,
left_on=None,
right_on=None,
left_index=False,
right_index=False,
sort=False,
suffixes=("_x", "_y"),
copy=True,
indicator=False,
validate=None,
):
from pandas.core.reshape.merge import merge
return merge(
self,
right,
how=how,
on=on,
left_on=left_on,
right_on=right_on,
left_index=left_index,
right_index=right_index,
sort=sort,
suffixes=suffixes,
copy=copy,
indicator=indicator,
validate=validate,
)
def round(self, decimals=0, *args, **kwargs):
"""
Round a DataFrame to a variable number of decimal places.
Parameters
----------
decimals : int, dict, Series
Number of decimal places to round each column to. If an int is
given, round each column to the same number of places.
Otherwise dict and Series round to variable numbers of places.
Column names should be in the keys if `decimals` is a
dict-like, or in the index if `decimals` is a Series. Any
columns not included in `decimals` will be left as is. Elements
of `decimals` which are not columns of the input will be
ignored.
*args
Additional keywords have no effect but might be accepted for
compatibility with numpy.
**kwargs
Additional keywords have no effect but might be accepted for
compatibility with numpy.
Returns
-------
DataFrame
A DataFrame with the affected columns rounded to the specified
number of decimal places.
See Also
--------
numpy.around : Round a numpy array to the given number of decimals.
Series.round : Round a Series to the given number of decimals.
Examples
--------
>>> df = pd.DataFrame([(.21, .32), (.01, .67), (.66, .03), (.21, .18)],
... columns=['dogs', 'cats'])
>>> df
dogs cats
0 0.21 0.32
1 0.01 0.67
2 0.66 0.03
3 0.21 0.18
By providing an integer each column is rounded to the same number
of decimal places
>>> df.round(1)
dogs cats
0 0.2 0.3
1 0.0 0.7
2 0.7 0.0
3 0.2 0.2
With a dict, the number of places for specific columns can be
specified with the column names as key and the number of decimal
places as value
>>> df.round({'dogs': 1, 'cats': 0})
dogs cats
0 0.2 0.0
1 0.0 1.0
2 0.7 0.0
3 0.2 0.0
Using a Series, the number of places for specific columns can be
specified with the column names as index and the number of
decimal places as value
>>> decimals = pd.Series([0, 1], index=['cats', 'dogs'])
>>> df.round(decimals)
dogs cats
0 0.2 0.0
1 0.0 1.0
2 0.7 0.0
3 0.2 0.0
"""
from pandas.core.reshape.concat import concat
def _dict_round(df, decimals):
for col, vals in df.items():
try:
yield _series_round(vals, decimals[col])
except KeyError:
yield vals
def _series_round(s, decimals):
if is_integer_dtype(s) or is_float_dtype(s):
return s.round(decimals)
return s
nv.validate_round(args, kwargs)
if isinstance(decimals, (dict, Series)):
if isinstance(decimals, Series):
if not decimals.index.is_unique:
raise ValueError("Index of decimals must be unique")
new_cols = list(_dict_round(self, decimals))
elif is_integer(decimals):
# Dispatch to Series.round
new_cols = [_series_round(v, decimals) for _, v in self.items()]
else:
raise TypeError("decimals must be an integer, a dict-like or a Series")
if len(new_cols) > 0:
return self._constructor(
concat(new_cols, axis=1), index=self.index, columns=self.columns
)
else:
return self
# ----------------------------------------------------------------------
# Statistical methods, etc.
def corr(self, method="pearson", min_periods=1):
"""
Compute pairwise correlation of columns, excluding NA/null values.
Parameters
----------
method : {'pearson', 'kendall', 'spearman'} or callable
Method of correlation:
* pearson : standard correlation coefficient
* kendall : Kendall Tau correlation coefficient
* spearman : Spearman rank correlation
* callable: callable with input two 1d ndarrays
and returning a float. Note that the returned matrix from corr
will have 1 along the diagonals and will be symmetric
regardless of the callable's behavior.
.. versionadded:: 0.24.0
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result. Currently only available for Pearson
and Spearman correlation.
Returns
-------
DataFrame
Correlation matrix.
See Also
--------
DataFrame.corrwith
Series.corr
Examples
--------
>>> def histogram_intersection(a, b):
... v = np.minimum(a, b).sum().round(decimals=1)
... return v
>>> df = pd.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)],
... columns=['dogs', 'cats'])
>>> df.corr(method=histogram_intersection)
dogs cats
dogs 1.0 0.3
cats 0.3 1.0
"""
numeric_df = self._get_numeric_data()
cols = numeric_df.columns
idx = cols.copy()
mat = numeric_df.values
if method == "pearson":
correl = libalgos.nancorr(ensure_float64(mat), minp=min_periods)
elif method == "spearman":
correl = libalgos.nancorr_spearman(ensure_float64(mat), minp=min_periods)
elif method == "kendall" or callable(method):
if min_periods is None:
min_periods = 1
mat = ensure_float64(mat).T
corrf = nanops.get_corr_func(method)
K = len(cols)
correl = np.empty((K, K), dtype=float)
mask = np.isfinite(mat)
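            # Pairwise loop over columns: pairs with fewer than ``min_periods``
            # overlapping finite observations yield NaN, and each value is
            # mirrored because the correlation matrix is symmetric.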
for i, ac in enumerate(mat):
for j, bc in enumerate(mat):
if i > j:
continue
valid = mask[i] & mask[j]
if valid.sum() < min_periods:
c = np.nan
elif i == j:
c = 1.0
elif not valid.all():
c = corrf(ac[valid], bc[valid])
else:
c = corrf(ac, bc)
correl[i, j] = c
correl[j, i] = c
else:
raise ValueError(
"method must be either 'pearson', "
"'spearman', 'kendall', or a callable, "
"'{method}' was supplied".format(method=method)
)
return self._constructor(correl, index=idx, columns=cols)
def cov(self, min_periods=None):
"""
Compute pairwise covariance of columns, excluding NA/null values.
Compute the pairwise covariance among the series of a DataFrame.
The returned data frame is the `covariance matrix
<https://en.wikipedia.org/wiki/Covariance_matrix>`__ of the columns
of the DataFrame.
Both NA and null values are automatically excluded from the
calculation. (See the note below about bias from missing values.)
A threshold can be set for the minimum number of
observations for each value created. Comparisons with observations
below this threshold will be returned as ``NaN``.
This method is generally used for the analysis of time series data to
understand the relationship between different measures
across time.
Parameters
----------
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result.
Returns
-------
DataFrame
The covariance matrix of the series of the DataFrame.
See Also
--------
Series.cov : Compute covariance with another Series.
core.window.EWM.cov: Exponential weighted sample covariance.
core.window.Expanding.cov : Expanding sample covariance.
core.window.Rolling.cov : Rolling sample covariance.
Notes
-----
Returns the covariance matrix of the DataFrame's time series.
The covariance is normalized by N-1.
For DataFrames that have Series that are missing data (assuming that
data is `missing at random
<https://en.wikipedia.org/wiki/Missing_data#Missing_at_random>`__)
the returned covariance matrix will be an unbiased estimate
of the variance and covariance between the member Series.
However, for many applications this estimate may not be acceptable
because the estimate covariance matrix is not guaranteed to be positive
semi-definite. This could lead to estimate correlations having
absolute values which are greater than one, and/or a non-invertible
covariance matrix. See `Estimation of covariance matrices
<http://en.wikipedia.org/w/index.php?title=Estimation_of_covariance_
matrices>`__ for more details.
Examples
--------
>>> df = pd.DataFrame([(1, 2), (0, 3), (2, 0), (1, 1)],
... columns=['dogs', 'cats'])
>>> df.cov()
dogs cats
dogs 0.666667 -1.000000
cats -1.000000 1.666667
>>> np.random.seed(42)
>>> df = pd.DataFrame(np.random.randn(1000, 5),
... columns=['a', 'b', 'c', 'd', 'e'])
>>> df.cov()
a b c d e
a 0.998438 -0.020161 0.059277 -0.008943 0.014144
b -0.020161 1.059352 -0.008543 -0.024738 0.009826
c 0.059277 -0.008543 1.010670 -0.001486 -0.000271
d -0.008943 -0.024738 -0.001486 0.921297 -0.013692
e 0.014144 0.009826 -0.000271 -0.013692 0.977795
**Minimum number of periods**
This method also supports an optional ``min_periods`` keyword
that specifies the required minimum number of non-NA observations for
each column pair in order to have a valid result:
>>> np.random.seed(42)
>>> df = pd.DataFrame(np.random.randn(20, 3),
... columns=['a', 'b', 'c'])
>>> df.loc[df.index[:5], 'a'] = np.nan
>>> df.loc[df.index[5:10], 'b'] = np.nan
>>> df.cov(min_periods=12)
a b c
a 0.316741 NaN -0.150812
b NaN 1.248003 0.191417
c -0.150812 0.191417 0.895202
"""
numeric_df = self._get_numeric_data()
cols = numeric_df.columns
idx = cols.copy()
mat = numeric_df.values
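        # Fast path: with no missing values defer to ``np.cov`` (all-NaN if
        # there are fewer rows than ``min_periods``); otherwise use the
        # pairwise NaN-aware routine.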
if notna(mat).all():
if min_periods is not None and min_periods > len(mat):
baseCov = np.empty((mat.shape[1], mat.shape[1]))
baseCov.fill(np.nan)
else:
baseCov = np.cov(mat.T)
baseCov = baseCov.reshape((len(cols), len(cols)))
else:
baseCov = libalgos.nancorr(ensure_float64(mat), cov=True, minp=min_periods)
return self._constructor(baseCov, index=idx, columns=cols)
def corrwith(self, other, axis=0, drop=False, method="pearson"):
"""
Compute pairwise correlation.
Pairwise correlation is computed between rows or columns of
DataFrame with rows or columns of Series or DataFrame. DataFrames
are first aligned along both axes before computing the
correlations.
Parameters
----------
other : DataFrame, Series
Object with which to compute correlations.
axis : {0 or 'index', 1 or 'columns'}, default 0
The axis to use. 0 or 'index' to compute column-wise, 1 or 'columns' for
row-wise.
drop : bool, default False
Drop missing indices from result.
method : {'pearson', 'kendall', 'spearman'} or callable
Method of correlation:
* pearson : standard correlation coefficient
* kendall : Kendall Tau correlation coefficient
* spearman : Spearman rank correlation
* callable: callable with input two 1d ndarrays
and returning a float.
.. versionadded:: 0.24.0
Returns
-------
Series
Pairwise correlations.
See Also
--------
DataFrame.corr
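        Examples
        --------
        A minimal illustration with hypothetical data:
        >>> df1 = pd.DataFrame({'A': [1, 2, 3, 4], 'B': [4, 3, 2, 1]})
        >>> df2 = pd.DataFrame({'A': [1, 2, 3, 4], 'B': [1, 2, 3, 4]})
        >>> df1.corrwith(df2)
        A    1.0
        B   -1.0
        dtype: float64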
"""
axis = self._get_axis_number(axis)
this = self._get_numeric_data()
if isinstance(other, Series):
return this.apply(lambda x: other.corr(x, method=method), axis=axis)
other = other._get_numeric_data()
left, right = this.align(other, join="inner", copy=False)
if axis == 1:
left = left.T
right = right.T
if method == "pearson":
# mask missing values
left = left + right * 0
right = right + left * 0
# demeaned data
ldem = left - left.mean()
rdem = right - right.mean()
num = (ldem * rdem).sum()
dom = (left.count() - 1) * left.std() * right.std()
correl = num / dom
elif method in ["kendall", "spearman"] or callable(method):
def c(x):
return nanops.nancorr(x[0], x[1], method=method)
correl = Series(
map(c, zip(left.values.T, right.values.T)), index=left.columns
)
else:
raise ValueError(
"Invalid method {method} was passed, "
"valid methods are: 'pearson', 'kendall', "
"'spearman', or callable".format(method=method)
)
if not drop:
# Find non-matching labels along the given axis
# and append missing correlations (GH 22375)
raxis = 1 if axis == 0 else 0
result_index = this._get_axis(raxis).union(other._get_axis(raxis))
idx_diff = result_index.difference(correl.index)
if len(idx_diff) > 0:
correl = correl.append(Series([np.nan] * len(idx_diff), index=idx_diff))
return correl
# ----------------------------------------------------------------------
# ndarray-like stats methods
def count(self, axis=0, level=None, numeric_only=False):
"""
Count non-NA cells for each column or row.
The values `None`, `NaN`, `NaT`, and optionally `numpy.inf` (depending
on `pandas.options.mode.use_inf_as_na`) are considered NA.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
If 0 or 'index' counts are generated for each column.
If 1 or 'columns' counts are generated for each **row**.
level : int or str, optional
If the axis is a `MultiIndex` (hierarchical), count along a
particular `level`, collapsing into a `DataFrame`.
A `str` specifies the level name.
numeric_only : bool, default False
Include only `float`, `int` or `boolean` data.
Returns
-------
Series or DataFrame
For each column/row the number of non-NA/null entries.
If `level` is specified returns a `DataFrame`.
See Also
--------
Series.count: Number of non-NA elements in a Series.
DataFrame.shape: Number of DataFrame rows and columns (including NA
elements).
DataFrame.isna: Boolean same-sized DataFrame showing places of NA
elements.
Examples
--------
Constructing DataFrame from a dictionary:
>>> df = pd.DataFrame({"Person":
... ["John", "Myla", "Lewis", "John", "Myla"],
... "Age": [24., np.nan, 21., 33, 26],
... "Single": [False, True, True, True, False]})
>>> df
Person Age Single
0 John 24.0 False
1 Myla NaN True
2 Lewis 21.0 True
3 John 33.0 True
4 Myla 26.0 False
Notice the uncounted NA values:
>>> df.count()
Person 5
Age 4
Single 5
dtype: int64
Counts for each **row**:
>>> df.count(axis='columns')
0 3
1 2
2 3
3 3
4 3
dtype: int64
Counts for one level of a `MultiIndex`:
>>> df.set_index(["Person", "Single"]).count(level="Person")
Age
Person
John 2
Lewis 1
Myla 1
"""
axis = self._get_axis_number(axis)
if level is not None:
return self._count_level(level, axis=axis, numeric_only=numeric_only)
if numeric_only:
frame = self._get_numeric_data()
else:
frame = self
# GH #423
if len(frame._get_axis(axis)) == 0:
result = Series(0, index=frame._get_agg_axis(axis))
else:
if frame._is_mixed_type or frame._data.any_extension_types:
# the or any_extension_types is really only hit for single-
# column frames with an extension array
result = notna(frame).sum(axis=axis)
else:
# GH13407
series_counts = notna(frame).sum(axis=axis)
counts = series_counts.values
result = Series(counts, index=frame._get_agg_axis(axis))
return result.astype("int64")
def _count_level(self, level, axis=0, numeric_only=False):
if numeric_only:
frame = self._get_numeric_data()
else:
frame = self
count_axis = frame._get_axis(axis)
agg_axis = frame._get_agg_axis(axis)
if not isinstance(count_axis, ABCMultiIndex):
raise TypeError(
"Can only count levels on hierarchical "
"{ax}.".format(ax=self._get_axis_name(axis))
)
if frame._is_mixed_type:
# Since we have mixed types, calling notna(frame.values) might
# upcast everything to object
mask = notna(frame).values
else:
# But use the speedup when we have homogeneous dtypes
mask = notna(frame.values)
if axis == 1:
# We're transposing the mask rather than frame to avoid potential
# upcasts to object, which induces a ~20x slowdown
mask = mask.T
if isinstance(level, str):
level = count_axis._get_level_number(level)
level_name = count_axis._names[level]
level_index = count_axis.levels[level]._shallow_copy(name=level_name)
level_codes = ensure_int64(count_axis.codes[level])
counts = lib.count_level_2d(mask, level_codes, len(level_index), axis=0)
result = DataFrame(counts, index=level_index, columns=agg_axis)
if axis == 1:
# Undo our earlier transpose
return result.T
else:
return result
def _reduce(
self, op, name, axis=0, skipna=True, numeric_only=None, filter_type=None, **kwds
):
if axis is None and filter_type == "bool":
labels = None
constructor = None
else:
# TODO: Make other agg func handle axis=None properly
axis = self._get_axis_number(axis)
labels = self._get_agg_axis(axis)
constructor = self._constructor
def f(x):
return op(x, axis=axis, skipna=skipna, **kwds)
def _get_data(axis_matters):
if filter_type is None or filter_type == "numeric":
data = self._get_numeric_data()
elif filter_type == "bool":
if axis_matters:
# GH#25101, GH#24434
data = self._get_bool_data() if axis == 0 else self
else:
data = self._get_bool_data()
else: # pragma: no cover
msg = (
"Generating numeric_only data with filter_type {f}"
"not supported.".format(f=filter_type)
)
raise NotImplementedError(msg)
return data
if numeric_only is None:
values = self.values
try:
result = f(values)
if filter_type == "bool" and is_object_dtype(values) and axis is None:
# work around https://github.com/numpy/numpy/issues/10489
# TODO: combine with hasattr(result, 'dtype') further down
# hard since we don't have `values` down there.
result = np.bool_(result)
except TypeError:
# e.g. in nanops trying to convert strs to float
# try by-column first
if filter_type is None and axis == 0:
# this can end up with a non-reduction
# but not always. if the types are mixed
# with datelike then need to make sure a series
# we only end up here if we have not specified
# numeric_only and yet we have tried a
# column-by-column reduction, where we have mixed type.
# So let's just do what we can
from pandas.core.apply import frame_apply
opa = frame_apply(
self, func=f, result_type="expand", ignore_failures=True
)
result = opa.get_result()
if result.ndim == self.ndim:
result = result.iloc[0]
return result
            # TODO: why doesn't axis matter here?
data = _get_data(axis_matters=False)
with np.errstate(all="ignore"):
result = f(data.values)
labels = data._get_agg_axis(axis)
else:
if numeric_only:
data = _get_data(axis_matters=True)
values = data.values
labels = data._get_agg_axis(axis)
else:
values = self.values
result = f(values)
if hasattr(result, "dtype") and is_object_dtype(result.dtype):
try:
if filter_type is None or filter_type == "numeric":
result = result.astype(np.float64)
elif filter_type == "bool" and notna(result).all():
result = result.astype(np.bool_)
except (ValueError, TypeError):
# try to coerce to the original dtypes item by item if we can
if axis == 0:
result = coerce_to_dtypes(result, self.dtypes)
if constructor is not None:
result = Series(result, index=labels)
return result
def nunique(self, axis=0, dropna=True):
"""
Count distinct observations over requested axis.
Return Series with number of distinct observations. Can ignore NaN
values.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
The axis to use. 0 or 'index' for row-wise, 1 or 'columns' for
column-wise.
dropna : bool, default True
Don't include NaN in the counts.
Returns
-------
Series
See Also
--------
Series.nunique: Method nunique for Series.
DataFrame.count: Count non-NA cells for each column or row.
Examples
--------
>>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [1, 1, 1]})
>>> df.nunique()
A 3
B 1
dtype: int64
>>> df.nunique(axis=1)
0 1
1 2
2 2
dtype: int64
"""
return self.apply(Series.nunique, axis=axis, dropna=dropna)
def idxmin(self, axis=0, skipna=True):
"""
Return index of first occurrence of minimum over requested axis.
NA/null values are excluded.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
            The axis to use. 0 or 'index' for row-wise, 1 or 'columns' for column-wise.
skipna : bool, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA.
Returns
-------
Series
Indexes of minima along the specified axis.
Raises
------
ValueError
* If the row/column is empty
See Also
--------
Series.idxmin
Notes
-----
This method is the DataFrame version of ``ndarray.argmin``.
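        Examples
        --------
        A minimal illustration with hypothetical data:
        >>> df = pd.DataFrame({'a': [4, 1, 2], 'b': [3, 2, 5]})
        >>> df.idxmin()
        a    1
        b    1
        dtype: int64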
"""
axis = self._get_axis_number(axis)
indices = nanops.nanargmin(self.values, axis=axis, skipna=skipna)
index = self._get_axis(axis)
result = [index[i] if i >= 0 else np.nan for i in indices]
return Series(result, index=self._get_agg_axis(axis))
def idxmax(self, axis=0, skipna=True):
"""
Return index of first occurrence of maximum over requested axis.
NA/null values are excluded.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
            The axis to use. 0 or 'index' for row-wise, 1 or 'columns' for column-wise.
skipna : bool, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA.
Returns
-------
Series
Indexes of maxima along the specified axis.
Raises
------
ValueError
* If the row/column is empty
See Also
--------
Series.idxmax
Notes
-----
This method is the DataFrame version of ``ndarray.argmax``.
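        Examples
        --------
        A minimal illustration with hypothetical data:
        >>> df = pd.DataFrame({'a': [4, 1, 2], 'b': [3, 2, 5]})
        >>> df.idxmax()
        a    0
        b    2
        dtype: int64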
"""
axis = self._get_axis_number(axis)
indices = nanops.nanargmax(self.values, axis=axis, skipna=skipna)
index = self._get_axis(axis)
result = [index[i] if i >= 0 else np.nan for i in indices]
return Series(result, index=self._get_agg_axis(axis))
def _get_agg_axis(self, axis_num):
"""
Let's be explicit about this.
"""
if axis_num == 0:
return self.columns
elif axis_num == 1:
return self.index
else:
raise ValueError("Axis must be 0 or 1 (got %r)" % axis_num)
def mode(self, axis=0, numeric_only=False, dropna=True):
"""
Get the mode(s) of each element along the selected axis.
The mode of a set of values is the value that appears most often.
It can be multiple values.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
The axis to iterate over while searching for the mode:
* 0 or 'index' : get mode of each column
* 1 or 'columns' : get mode of each row.
numeric_only : bool, default False
If True, only apply to numeric columns.
dropna : bool, default True
Don't consider counts of NaN/NaT.
.. versionadded:: 0.24.0
Returns
-------
DataFrame
The modes of each column or row.
See Also
--------
Series.mode : Return the highest frequency value in a Series.
Series.value_counts : Return the counts of values in a Series.
Examples
--------
>>> df = pd.DataFrame([('bird', 2, 2),
... ('mammal', 4, np.nan),
... ('arthropod', 8, 0),
... ('bird', 2, np.nan)],
... index=('falcon', 'horse', 'spider', 'ostrich'),
... columns=('species', 'legs', 'wings'))
>>> df
species legs wings
falcon bird 2 2.0
horse mammal 4 NaN
spider arthropod 8 0.0
ostrich bird 2 NaN
By default, missing values are not considered, and the mode of wings
are both 0 and 2. The second row of species and legs contains ``NaN``,
because they have only one mode, but the DataFrame has two rows.
>>> df.mode()
species legs wings
0 bird 2.0 0.0
1 NaN NaN 2.0
        Setting ``dropna=False``, ``NaN`` values are considered and they can be
the mode (like for wings).
>>> df.mode(dropna=False)
species legs wings
0 bird 2 NaN
Setting ``numeric_only=True``, only the mode of numeric columns is
computed, and columns of other types are ignored.
>>> df.mode(numeric_only=True)
legs wings
0 2.0 0.0
1 NaN 2.0
To compute the mode over columns and not rows, use the axis parameter:
>>> df.mode(axis='columns', numeric_only=True)
0 1
falcon 2.0 NaN
horse 4.0 NaN
spider 0.0 8.0
ostrich 2.0 NaN
"""
data = self if not numeric_only else self._get_numeric_data()
def f(s):
return s.mode(dropna=dropna)
return data.apply(f, axis=axis)
def quantile(self, q=0.5, axis=0, numeric_only=True, interpolation="linear"):
"""
Return values at the given quantile over requested axis.
Parameters
----------
q : float or array-like, default 0.5 (50% quantile)
Value between 0 <= q <= 1, the quantile(s) to compute.
axis : {0, 1, 'index', 'columns'} (default 0)
Equals 0 or 'index' for row-wise, 1 or 'columns' for column-wise.
numeric_only : bool, default True
If False, the quantile of datetime and timedelta data will be
computed as well.
interpolation : {'linear', 'lower', 'higher', 'midpoint', 'nearest'}
This optional parameter specifies the interpolation method to use,
when the desired quantile lies between two data points `i` and `j`:
* linear: `i + (j - i) * fraction`, where `fraction` is the
fractional part of the index surrounded by `i` and `j`.
* lower: `i`.
* higher: `j`.
* nearest: `i` or `j` whichever is nearest.
* midpoint: (`i` + `j`) / 2.
Returns
-------
Series or DataFrame
If ``q`` is an array, a DataFrame will be returned where the
index is ``q``, the columns are the columns of self, and the
values are the quantiles.
If ``q`` is a float, a Series will be returned where the
index is the columns of self and the values are the quantiles.
See Also
--------
core.window.Rolling.quantile: Rolling quantile.
numpy.percentile: Numpy function to compute the percentile.
Examples
--------
>>> df = pd.DataFrame(np.array([[1, 1], [2, 10], [3, 100], [4, 100]]),
... columns=['a', 'b'])
>>> df.quantile(.1)
a 1.3
b 3.7
Name: 0.1, dtype: float64
>>> df.quantile([.1, .5])
a b
0.1 1.3 3.7
0.5 2.5 55.0
Specifying `numeric_only=False` will also compute the quantile of
datetime and timedelta data.
>>> df = pd.DataFrame({'A': [1, 2],
... 'B': [pd.Timestamp('2010'),
... pd.Timestamp('2011')],
... 'C': [pd.Timedelta('1 days'),
... pd.Timedelta('2 days')]})
>>> df.quantile(0.5, numeric_only=False)
A 1.5
B 2010-07-02 12:00:00
C 1 days 12:00:00
Name: 0.5, dtype: object
"""
validate_percentile(q)
data = self._get_numeric_data() if numeric_only else self
axis = self._get_axis_number(axis)
is_transposed = axis == 1
if is_transposed:
data = data.T
if len(data.columns) == 0:
# GH#23925 _get_numeric_data may have dropped all columns
cols = Index([], name=self.columns.name)
if is_list_like(q):
return self._constructor([], index=q, columns=cols)
return self._constructor_sliced([], index=cols, name=q)
result = data._data.quantile(
qs=q, axis=1, interpolation=interpolation, transposed=is_transposed
)
if result.ndim == 2:
result = self._constructor(result)
else:
result = self._constructor_sliced(result, name=q)
if is_transposed:
result = result.T
return result
def to_timestamp(self, freq=None, how="start", axis=0, copy=True):
"""
Cast to DatetimeIndex of timestamps, at *beginning* of period.
Parameters
----------
freq : str, default frequency of PeriodIndex
Desired frequency.
how : {'s', 'e', 'start', 'end'}
Convention for converting period to timestamp; start of period
vs. end.
axis : {0 or 'index', 1 or 'columns'}, default 0
The axis to convert (the index by default).
copy : bool, default True
If False then underlying input data is not copied.
Returns
-------
DataFrame with DatetimeIndex
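        Examples
        --------
        A minimal sketch, assuming a monthly ``PeriodIndex`` with made-up data:
        >>> idx = pd.period_range('2020-01', periods=2, freq='M')
        >>> df = pd.DataFrame({'sales': [10, 20]}, index=idx)
        >>> df.to_timestamp()
                    sales
        2020-01-01     10
        2020-02-01     20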
"""
new_data = self._data
if copy:
new_data = new_data.copy()
axis = self._get_axis_number(axis)
if axis == 0:
new_data.set_axis(1, self.index.to_timestamp(freq=freq, how=how))
elif axis == 1:
new_data.set_axis(0, self.columns.to_timestamp(freq=freq, how=how))
else: # pragma: no cover
raise AssertionError("Axis must be 0 or 1. Got {ax!s}".format(ax=axis))
return self._constructor(new_data)
def to_period(self, freq=None, axis=0, copy=True):
"""
Convert DataFrame from DatetimeIndex to PeriodIndex.
Convert DataFrame from DatetimeIndex to PeriodIndex with desired
frequency (inferred from index if not passed).
Parameters
----------
freq : str, default
Frequency of the PeriodIndex.
axis : {0 or 'index', 1 or 'columns'}, default 0
The axis to convert (the index by default).
copy : bool, default True
If False then underlying input data is not copied.
Returns
-------
TimeSeries with PeriodIndex
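        Examples
        --------
        A minimal sketch, assuming month-end timestamps with made-up data:
        >>> idx = pd.to_datetime(['2020-01-31', '2020-02-29'])
        >>> df = pd.DataFrame({'sales': [10, 20]}, index=idx)
        >>> df.to_period('M')
                 sales
        2020-01     10
        2020-02     20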
"""
new_data = self._data
if copy:
new_data = new_data.copy()
axis = self._get_axis_number(axis)
if axis == 0:
new_data.set_axis(1, self.index.to_period(freq=freq))
elif axis == 1:
new_data.set_axis(0, self.columns.to_period(freq=freq))
else: # pragma: no cover
raise AssertionError("Axis must be 0 or 1. Got {ax!s}".format(ax=axis))
return self._constructor(new_data)
def isin(self, values):
"""
Whether each element in the DataFrame is contained in values.
Parameters
----------
values : iterable, Series, DataFrame or dict
The result will only be true at a location if all the
labels match. If `values` is a Series, that's the index. If
`values` is a dict, the keys must be the column names,
which must match. If `values` is a DataFrame,
then both the index and column labels must match.
Returns
-------
DataFrame
DataFrame of booleans showing whether each element in the DataFrame
is contained in values.
See Also
--------
DataFrame.eq: Equality test for DataFrame.
Series.isin: Equivalent method on Series.
Series.str.contains: Test if pattern or regex is contained within a
string of a Series or Index.
Examples
--------
>>> df = pd.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]},
... index=['falcon', 'dog'])
>>> df
num_legs num_wings
falcon 2 2
dog 4 0
When ``values`` is a list check whether every value in the DataFrame
is present in the list (which animals have 0 or 2 legs or wings)
>>> df.isin([0, 2])
num_legs num_wings
falcon True True
dog False True
When ``values`` is a dict, we can pass values to check for each
column separately:
>>> df.isin({'num_wings': [0, 3]})
num_legs num_wings
falcon False False
dog False True
When ``values`` is a Series or DataFrame the index and column must
match. Note that 'falcon' does not match based on the number of legs
in df2.
>>> other = pd.DataFrame({'num_legs': [8, 2], 'num_wings': [0, 2]},
... index=['spider', 'falcon'])
>>> df.isin(other)
num_legs num_wings
falcon True True
dog False False
"""
if isinstance(values, dict):
from pandas.core.reshape.concat import concat
values = collections.defaultdict(list, values)
return concat(
(
self.iloc[:, [i]].isin(values[col])
for i, col in enumerate(self.columns)
),
axis=1,
)
elif isinstance(values, Series):
if not values.index.is_unique:
raise ValueError("cannot compute isin with a duplicate axis.")
return self.eq(values.reindex_like(self), axis="index")
elif isinstance(values, DataFrame):
if not (values.columns.is_unique and values.index.is_unique):
raise ValueError("cannot compute isin with a duplicate axis.")
return self.eq(values.reindex_like(self))
else:
if not is_list_like(values):
raise TypeError(
f"only list-like or dict-like objects are allowed "
f"to be passed to DataFrame.isin(), "
f"you passed a {repr(type(values).__name__)}"
)
return DataFrame(
algorithms.isin(self.values.ravel(), values).reshape(self.shape),
self.index,
self.columns,
)
# ----------------------------------------------------------------------
# Add plotting methods to DataFrame
plot = CachedAccessor("plot", pandas.plotting.PlotAccessor)
hist = pandas.plotting.hist_frame
boxplot = pandas.plotting.boxplot_frame
sparse = CachedAccessor("sparse", SparseFrameAccessor)
DataFrame._setup_axes(
["index", "columns"],
docs={
"index": "The index (row labels) of the DataFrame.",
"columns": "The column labels of the DataFrame.",
},
)
DataFrame._add_numeric_operations()
DataFrame._add_series_or_dataframe_operations()
ops.add_flex_arithmetic_methods(DataFrame)
ops.add_special_arithmetic_methods(DataFrame)
def _from_nested_dict(data):
# TODO: this should be seriously cythonized
new_data = {}
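    # Transpose the nesting: {row: {col: val}} -> {col: {row: val}} so the
    # result can be consumed column-wise.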
for index, s in data.items():
for col, v in s.items():
new_data[col] = new_data.get(col, {})
new_data[col][index] = v
return new_data
def _put_str(s, space):
return "{s}".format(s=s)[:space].ljust(space)
| 34.529124
| 169
| 0.531435
|
70717a4d1424cf9b4ce01dc88af553dac4ab5ddb
| 11,946
|
py
|
Python
|
yt/frontends/stream/io.py
|
aemerick/yt
|
984484616d75c6d7603e71b9d45c5d617705a0e5
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
yt/frontends/stream/io.py
|
aemerick/yt
|
984484616d75c6d7603e71b9d45c5d617705a0e5
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
yt/frontends/stream/io.py
|
aemerick/yt
|
984484616d75c6d7603e71b9d45c5d617705a0e5
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
import numpy as np
from yt.utilities.io_handler import \
BaseIOHandler
from yt.utilities.logger import ytLogger as mylog
from yt.utilities.exceptions import YTDomainOverflow
class IOHandlerStream(BaseIOHandler):
_dataset_type = "stream"
_vector_fields = ("particle_velocity", "particle_position")
def __init__(self, ds):
self.fields = ds.stream_handler.fields
self.field_units = ds.stream_handler.field_units
super(IOHandlerStream, self).__init__(ds)
def _read_data_set(self, grid, field):
# This is where we implement processor-locking
#if grid.id not in self.grids_in_memory:
# mylog.error("Was asked for %s but I have %s", grid.id, self.grids_in_memory.keys())
# raise KeyError
tr = self.fields[grid.id][field]
# If it's particles, we copy.
if len(tr.shape) == 1: return tr.copy()
# New in-place unit conversion breaks if we don't copy first
return tr
def _read_fluid_selection(self, chunks, selector, fields, size):
chunks = list(chunks)
if any((ftype not in self.ds.fluid_types for ftype, fname in fields)):
raise NotImplementedError
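        # Pre-allocate one flat output buffer per requested field; each grid
        # copies its selected cells into it at the running offset ``ind``.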
rv = {}
for field in fields:
rv[field] = self.ds.arr(np.empty(size, dtype="float64"))
ng = sum(len(c.objs) for c in chunks)
mylog.debug("Reading %s cells of %s fields in %s blocks",
size, [f2 for f1, f2 in fields], ng)
for field in fields:
ftype, fname = field
ind = 0
for chunk in chunks:
for g in chunk.objs:
ds = self.fields[g.id][ftype, fname]
ind += g.select(selector, ds, rv[field], ind) # caches
return rv
def _read_particle_coords(self, chunks, ptf):
chunks = list(chunks)
for chunk in chunks:
for g in chunk.objs:
if g.NumberOfParticles == 0: continue
gf = self.fields[g.id]
for ptype, field_list in sorted(ptf.items()):
if (ptype, "particle_position") in gf:
x, y, z = gf[ptype, "particle_position"].T
else:
x, y, z = (gf[ptype, "particle_position_%s" % ax] for
ax in 'xyz')
yield ptype, (x, y, z)
def _read_particle_fields(self, chunks, ptf, selector):
chunks = list(chunks)
for chunk in chunks:
for g in chunk.objs:
if g.NumberOfParticles == 0: continue
gf = self.fields[g.id]
for ptype, field_list in sorted(ptf.items()):
if (ptype, "particle_position") in gf:
x, y, z = gf[ptype, "particle_position"].T
else:
x, y, z = (gf[ptype, "particle_position_%s" % ax] for
ax in 'xyz')
mask = selector.select_points(x, y, z, 0.0)
if mask is None: continue
for field in field_list:
data = np.asarray(gf[ptype, field])
yield (ptype, field), data[mask]
@property
def _read_exception(self):
return KeyError
class StreamParticleIOHandler(BaseIOHandler):
_vector_fields = ("particle_position", "particle_velocity")
_dataset_type = "stream_particles"
_vector_fields = ("particle_velocity", "particle_position")
def __init__(self, ds):
self.fields = ds.stream_handler.fields
super(StreamParticleIOHandler, self).__init__(ds)
def _read_particle_coords(self, chunks, ptf):
for data_file in sorted(self._get_data_files(chunks),
key=lambda x: (x.filename,
x.start)):
f = self.fields[data_file.filename]
# This double-reads
for ptype, field_list in sorted(ptf.items()):
yield ptype, (f[ptype, "particle_position_x"],
f[ptype, "particle_position_y"],
f[ptype, "particle_position_z"])
def _read_smoothing_length(self, chunks, ptf, ptype):
for data_file in sorted(self._get_data_files(chunks),
key=lambda x: (x.filename,
x.start)):
f = self.fields[data_file.filename]
return f[ptype, 'smoothing_length']
def _get_data_files(self, chunks):
data_files = set([])
for chunk in chunks:
for obj in chunk.objs:
data_files.update(obj.data_files)
return data_files
def _count_particles_chunks(self, psize, chunks, ptf, selector):
for ptype, (x, y, z) in self._read_particle_coords(chunks, ptf):
if (ptype, 'smoothing_length') in self.ds.field_list:
hsml = self._read_smoothing_length(chunks, ptf, ptype)
else:
hsml = 0.0
psize[ptype] += selector.count_points(x, y, z, hsml)
return psize
def _read_particle_fields(self, chunks, ptf, selector):
for data_file in sorted(self._get_data_files(chunks),
key=lambda x: (x.filename,
x.start)):
f = self.fields[data_file.filename]
for ptype, field_list in sorted(ptf.items()):
if (ptype, "particle_position") in f:
ppos = f[ptype, "particle_position"]
x = ppos[:,0]
y = ppos[:,1]
z = ppos[:,2]
else:
x, y, z = (f[ptype, "particle_position_%s" % ax]
for ax in 'xyz')
if (ptype, 'smoothing_length') in self.ds.field_list:
hsml = f[ptype, 'smoothing_length']
else:
hsml = 0.0
mask = selector.select_points(x, y, z, hsml)
if mask is None:
continue
for field in field_list:
data = f[ptype, field][mask]
yield (ptype, field), data
def _yield_coordinates(self, data_file, needed_ptype=None):
# self.fields[g.id][fname] is the pattern here
for ptype in self.ds.particle_types_raw:
if needed_ptype is not None and needed_ptype is not ptype:
continue
try:
pos = np.column_stack([self.fields[data_file.filename][
(ptype, "particle_position_%s" % ax)] for ax in 'xyz'])
except KeyError:
pos = self.fields[data_file.filename][ptype, "particle_position"]
if np.any(pos.min(axis=0) < data_file.ds.domain_left_edge) or \
np.any(pos.max(axis=0) > data_file.ds.domain_right_edge):
raise YTDomainOverflow(pos.min(axis=0), pos.max(axis=0),
data_file.ds.domain_left_edge,
data_file.ds.domain_right_edge)
yield ptype, pos
def _get_smoothing_length(self, data_file, dtype, shape):
ptype = self.ds._sph_ptypes[0]
return self.fields[data_file.filename][ptype, 'smoothing_length']
def _count_particles(self, data_file):
pcount = {}
for ptype in self.ds.particle_types_raw:
pcount[ptype] = 0
# stream datasets only have one "file"
if data_file.file_id > 0:
return pcount
for ptype in self.ds.particle_types_raw:
d = self.fields[data_file.filename]
try:
pcount[ptype] = d[ptype, "particle_position_x"].size
except KeyError:
pcount[ptype] = d[ptype, "particle_position"].shape[0]
return pcount
def _identify_fields(self, data_file):
return self.fields[data_file.filename].keys(), {}
class IOHandlerStreamHexahedral(BaseIOHandler):
_dataset_type = "stream_hexahedral"
_vector_fields = ("particle_velocity", "particle_position")
def __init__(self, ds):
self.fields = ds.stream_handler.fields
super(IOHandlerStreamHexahedral, self).__init__(ds)
def _read_fluid_selection(self, chunks, selector, fields, size):
chunks = list(chunks)
assert(len(chunks) == 1)
chunk = chunks[0]
rv = {}
for field in fields:
ftype, fname = field
rv[field] = np.empty(size, dtype="float64")
ngrids = sum(len(chunk.objs) for chunk in chunks)
mylog.debug("Reading %s cells of %s fields in %s blocks",
size, [fn for ft, fn in fields], ngrids)
for field in fields:
ind = 0
ftype, fname = field
for chunk in chunks:
for g in chunk.objs:
ds = self.fields[g.mesh_id].get(field, None)
if ds is None:
ds = self.fields[g.mesh_id][fname]
ind += g.select(selector, ds, rv[field], ind) # caches
return rv
class IOHandlerStreamOctree(BaseIOHandler):
_dataset_type = "stream_octree"
_vector_fields = ("particle_velocity", "particle_position")
def __init__(self, ds):
self.fields = ds.stream_handler.fields
super(IOHandlerStreamOctree, self).__init__(ds)
def _read_fluid_selection(self, chunks, selector, fields, size):
rv = {}
ind = 0
chunks = list(chunks)
assert(len(chunks) == 1)
for chunk in chunks:
assert(len(chunk.objs) == 1)
for subset in chunk.objs:
field_vals = {}
for field in fields:
field_vals[field] = self.fields[
subset.domain_id - subset._domain_offset][field]
subset.fill(field_vals, rv, selector, ind)
return rv
class IOHandlerStreamUnstructured(BaseIOHandler):
_dataset_type = "stream_unstructured"
def __init__(self, ds):
self.fields = ds.stream_handler.fields
super(IOHandlerStreamUnstructured, self).__init__(ds)
def _read_fluid_selection(self, chunks, selector, fields, size):
chunks = list(chunks)
rv = {}
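        # First pass: size one output array per field from the mesh
        # connectivity (per-node shape for node fields, per-element otherwise).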
for field in fields:
ftype, fname = field
if ftype == "all":
ci = np.concatenate([mesh.connectivity_indices
for mesh in self.ds.index.mesh_union])
else:
mesh_id = int(ftype[-1]) - 1
m = self.ds.index.meshes[mesh_id]
ci = m.connectivity_indices
num_elem = ci.shape[0]
if fname in self.ds._node_fields:
nodes_per_element = ci.shape[1]
rv[field] = np.empty((num_elem, nodes_per_element), dtype="float64")
else:
rv[field] = np.empty(num_elem, dtype="float64")
for field in fields:
ind = 0
ftype, fname = field
if ftype == "all":
objs = [mesh for mesh in self.ds.index.mesh_union]
else:
mesh_ids = [int(ftype[-1])]
chunk = chunks[mesh_ids[0] - 1]
objs = chunk.objs
for g in objs:
ds = self.fields[g.mesh_id].get(field, None)
if ds is None:
f = ('connect%d' % (g.mesh_id + 1), fname)
ds = self.fields[g.mesh_id][f]
ind += g.select(selector, ds, rv[field], ind) # caches
rv[field] = rv[field][:ind]
return rv
| 41.193103
| 96
| 0.53407
|
00617736ea06d241d0707e5eb0ef471b6054cd52
| 2,459
|
py
|
Python
|
Automatic_Inter_var_server_new.py
|
krishdb38/Bio_python
|
e20ba5579e1e6b4ecf9fe72ef9e21a5960416ae4
|
[
"MIT"
] | null | null | null |
Automatic_Inter_var_server_new.py
|
krishdb38/Bio_python
|
e20ba5579e1e6b4ecf9fe72ef9e21a5960416ae4
|
[
"MIT"
] | null | null | null |
Automatic_Inter_var_server_new.py
|
krishdb38/Bio_python
|
e20ba5579e1e6b4ecf9fe72ef9e21a5960416ae4
|
[
"MIT"
] | null | null | null |
#!/bin/python
import os
import subprocess
from subprocess import run
#! This must be run in linux
in_path = "/BiO/Preterm/raw_data/"
# Old 20 and new 40 are different
out_path = "/BiO/Preterm/intervar_result/"
# Check for old and New
#files = [1119,910,873,1489,989,880,1282,1584,875,1009,1093,1134,1293,1316,1389,1428,1477,1481,1557,1591]
#files = ["MWB_1008",'MWB_1040','MWB_110','MWB_1151','MWB_1226','MWB_1244','MWB_1250','MWB_1276','MWB_1443','MWB_1445','MWB_161','MWB_1676','MWB_174']
files = ['MWB_1960','MWB_1964','MWB_1975','MWB_1977','MWB_1982','MWB_1995','MWB_2007','MWB_2008','MWB_242','MWB_276','MWB_437','MWB_490','MWB_506','MWB_540']
#files = ['MWB_548','MWB_564','MWB_626','MWB_719','MWB_740','MWB_746','MWB_815','MWB_870','MWB_895','MWB_911','MWB_941','MWB_956','MWB_961']
for _ in files:
vcf_file = in_path+ str(_) +"/"+ str(_)+".PASS.vcf"
#if vcf_file.endswith(".vcf") and len(_) <10:
#vcf_file = _
#name = _.split(".")[0]
# print(name)
try:
# Check for the Folder exists or Not
os.mkdir(out_path+str(_))
except FileExistsError as e:
pass
condition = os.path.isfile(out_path+ str(_)+"/"+str(_)+".hg19_multianno.txt") # .anno.hg19_multianno.txt
if condition:
print(str(_)+"skipping\n\n")
else:
error_out = open(out_path+"/"+str(_)+"_error.txt","w+")
cmd = ["python","Intervar.py", "-b","hg19", "-i", vcf_file, "--input_type", "VCF", "-o", out_path+ str(_)+"/"+ str(_)]
print(cmd)
print("\n\nDoing Subprocess in ", str(_))
#process = subprocess.run(cmd,universal_newlines = True, stdout = error_out,\
# stderr = subprocess.PIPE, shell = False)
#if process.stderr:
# print(process.stderr)
#else:
# print("Success", name, "\n")
#print(process.returncode)
print("All tasks Completed")
# If a sample has already been processed there is no need to repeat it, so we also check whether the processed output file exists.
#
#process = subprocess.Popen(cmd,universal_newlines = True, stdout = subprocess.PIPE,\
# stderr = subprocess.PIPE, shell = False)
#std_out = process.communicate()[0]
# For LInux remove shell = True
#print(out.stderr)
#print(process.returncode)
#print(process.stdout)
# We will run Foor loop later But now lets test weather the Output is correct or not
#for i in range(100):
#os.makedirs(str(i))
#os.removedirs(str(i))
| 33.684932
| 157
| 0.640911
|
16b1dc7bc0cc1d0714307f72eaaf4b99e2eb8741
| 8,221
|
py
|
Python
|
salt/modules/test.py
|
sunbenxin/salt
|
b821f6a174e67a3e1def1ba7fa16885cd985bb0c
|
[
"Apache-2.0"
] | 1
|
2016-03-13T09:05:15.000Z
|
2016-03-13T09:05:15.000Z
|
salt/modules/test.py
|
sunbenxin/salt
|
b821f6a174e67a3e1def1ba7fa16885cd985bb0c
|
[
"Apache-2.0"
] | null | null | null |
salt/modules/test.py
|
sunbenxin/salt
|
b821f6a174e67a3e1def1ba7fa16885cd985bb0c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
Module for running arbitrary tests
'''
# Import Python libs
import os
import sys
import time
import random
# Import Salt libs
import salt
import salt.version
import salt.loader
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
.. code-block:: bash
salt '*' test.echo 'foo bar baz quo qux'
'''
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
.. code-block:: bash
salt '*' test.ping
'''
return True
def sleep(length):
'''
Instruct the minion to initiate a process that will sleep for a given
period of time.
CLI Example:
.. code-block:: bash
salt '*' test.sleep 20
'''
time.sleep(int(length))
return True
def rand_sleep(max=60):
'''
Sleep for a random number of seconds, used to test long-running commands
and minions returning at differing intervals
CLI Example:
.. code-block:: bash
salt '*' test.rand_sleep 60
'''
time.sleep(random.randint(0, max))
return True
def version():
'''
Return the version of salt on the minion
CLI Example:
.. code-block:: bash
salt '*' test.version
'''
return salt.__version__
def versions_information():
'''
Returns versions of components used by salt as a dict
CLI Example:
.. code-block:: bash
salt '*' test.versions_information
'''
return dict(salt.version.versions_information())
def versions_report():
'''
Returns versions of components used by salt
CLI Example:
.. code-block:: bash
salt '*' test.versions_report
'''
return '\n'.join(salt.version.versions_report())
def conf_test():
'''
Return the value for test.foo in the minion configuration file, or return
the default value
CLI Example:
.. code-block:: bash
salt '*' test.conf_test
'''
return __salt__['config.option']('test.foo')
def get_opts():
'''
Return the configuration options passed to this minion
CLI Example:
.. code-block:: bash
salt '*' test.get_opts
'''
return __opts__
def cross_test(func, args=None):
'''
Execute a minion function via the __salt__ object in the test
module, used to verify that the minion functions can be called
via the __salt__ module.
CLI Example:
.. code-block:: bash
salt '*' test.cross_test file.gid_to_group 0
'''
if args is None:
args = []
return __salt__[func](*args)
def kwarg(**kwargs):
'''
    Print out the data passed into the function ``**kwargs``. This is used both
    to test the publication data and cli kwarg passing, and to display the
    information available within the publication data.
CLI Example:
.. code-block:: bash
salt '*' test.kwarg num=1 txt="two" env='{a: 1, b: "hello"}'
'''
return kwargs
def arg(*args, **kwargs):
'''
    Print out the data passed into the function ``*args`` and ``**kwargs``. This
    is used both to test the publication data and cli argument passing, and to
    display the information available within the publication data.
Returns {"args": args, "kwargs": kwargs}.
CLI Example:
.. code-block:: bash
salt '*' test.arg 1 "two" 3.1 txt="hello" wow='{a: 1, b: "hello"}'
'''
return {"args": args, "kwargs": kwargs}
def arg_repr(*args, **kwargs):
'''
    Print out the data passed into the function ``*args`` and ``**kwargs``. This
    is used both to test the publication data and cli argument passing, and to
    display the information available within the publication data.
Returns {"args": repr(args), "kwargs": repr(kwargs)}.
CLI Example:
.. code-block:: bash
salt '*' test.arg_repr 1 "two" 3.1 txt="hello" wow='{a: 1, b: "hello"}'
'''
return {"args": repr(args), "kwargs": repr(kwargs)}
def fib(num):
'''
    Return the Fibonacci sequence up to the passed number, and the time it
    took to compute, in seconds. Used for performance tests
CLI Example:
.. code-block:: bash
salt '*' test.fib 3
'''
num = int(num)
start = time.time()
fib_a, fib_b = 0, 1
ret = [0]
while fib_b < num:
ret.append(fib_b)
fib_a, fib_b = fib_b, fib_a + fib_b
return ret, time.time() - start
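# Illustrative only (not part of the original module): called directly,
#   fib(20) -> ([0, 1, 1, 2, 3, 5, 8, 13], <elapsed seconds>)
# i.e. the sequence of values below the passed number, plus the wall-clock time taken.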
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number,
returns the sequence and the time it took to compute. Used for
performance tests.
CLI Example:
.. code-block:: bash
salt '*' test.collatz 3
'''
start = int(start)
begin = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
                start = start // 2
else:
start = start * 3 + 1
return steps, time.time() - begin
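# Illustrative only (not part of the original module): called directly,
#   collatz(6) -> ([6, 3, 10, 5, 16, 8, 4, 2], <elapsed seconds>)
# i.e. the sequence of steps taken before reaching 1, plus the time taken.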
def outputter(data):
'''
Test the outputter, pass in data to return
CLI Example:
.. code-block:: bash
salt '*' test.outputter foobar
'''
return data
def retcode(code=42):
'''
Test that the returncode system is functioning correctly
CLI Example:
.. code-block:: bash
salt '*' test.retcode 42
'''
__context__['retcode'] = code
return True
def provider(module):
'''
Pass in a function name to discover what provider is being used
CLI Example:
.. code-block:: bash
salt '*' test.provider service
'''
func = ''
for key in __salt__:
if not key.startswith('{0}.'.format(module)):
continue
func = key
break
if not func:
return ''
pfn = sys.modules[__salt__[func].__module__].__file__
pfn = os.path.basename(pfn)
return pfn[:pfn.rindex('.')]
def providers():
'''
Return a dict of the provider names and the files that provided them
CLI Example:
.. code-block:: bash
salt '*' test.providers
'''
ret = {}
for funcname in __salt__:
modname = funcname.split('.')[0]
if modname not in ret:
ret[provider(modname)] = modname
return ret
def not_loaded():
'''
List the modules that were not loaded by the salt loader system
CLI Example:
.. code-block:: bash
salt '*' test.not_loaded
'''
prov = providers()
ret = set()
loader = salt.loader._create_loader(__opts__, 'modules', 'module')
for mod_dir in loader.module_dirs:
if not os.path.isabs(mod_dir):
continue
if not os.path.isdir(mod_dir):
continue
for fn_ in os.listdir(mod_dir):
if fn_.startswith('_'):
continue
name = fn_.split('.')[0]
if name not in prov:
ret.add(name)
return sorted(ret)
def opts_pkg():
'''
Return an opts package with the grains and opts for this minion.
This is primarily used to create the options used for master side
state compiling routines
CLI Example:
.. code-block:: bash
salt '*' test.opts_pkg
'''
ret = {}
ret.update(__opts__)
ret['grains'] = __grains__
return ret
def tty(device, echo=None):
'''
Echo a string to a specific tty
CLI Example:
.. code-block:: bash
salt '*' test.tty tty0 'This is a test'
salt '*' test.tty pts3 'This is a test'
'''
if device.startswith('tty'):
teletype = '/dev/{0}'.format(device)
elif device.startswith('pts'):
teletype = '/dev/{0}'.format(device.replace('pts', 'pts/'))
else:
return {'Error': 'The specified device is not a valid TTY'}
cmd = 'echo {0} > {1}'.format(echo, teletype)
ret = __salt__['cmd.run_all'](cmd)
if ret['retcode'] == 0:
return {
'Success': 'Message was successfully echoed to {0}'.format(teletype)
}
else:
return {
'Error': 'Echoing to {0} returned error code {1}'.format(
teletype,
ret['retcode'])
}
| 20.865482
| 80
| 0.588006
|
4b5cfe0982be016ffaf7b0bfe9d6cf4590bccc24
| 1,062
|
py
|
Python
|
independence_tests/mi/var_info.py
|
marcuskaiser/projects
|
4841bb3adcd8625681d50bc8a2eed599eae44305
|
[
"MIT"
] | 1
|
2020-04-20T20:05:00.000Z
|
2020-04-20T20:05:00.000Z
|
independence_tests/mi/var_info.py
|
marcuskaiser/projects
|
4841bb3adcd8625681d50bc8a2eed599eae44305
|
[
"MIT"
] | null | null | null |
independence_tests/mi/var_info.py
|
marcuskaiser/projects
|
4841bb3adcd8625681d50bc8a2eed599eae44305
|
[
"MIT"
] | null | null | null |
import numpy as np
from scipy.stats import entropy
from independence_tests.mi.utils import _check_xy, _get_bins, _mi
from independence_tests.utils import fast_rank
def var_info(x, y, bins=None, rank=False, norm=False):
"""
Variation of Information
https://en.wikipedia.org/wiki/Variation_of_information
References
- "de Prado, Marcos Lopez. Machine Learning for Asset Managers.
Cambridge University Press, 2020."
- Hacine-Gharbi and Ravier (2018)
    First creates a histogram of the data, and then applies discrete
    estimators to the mutual information and entropy terms.
"""
x, y = _check_xy(x, y)
if rank is True:
x = fast_rank(x)
y = fast_rank(y)
bins = _get_bins(bins=bins, x=x, y=y)
count_xy = np.lib.histogramdd([x, y], bins)[0]
i_xy, count_x, count_y = _mi(count_xy=count_xy)
h_x = entropy(count_x)
h_y = entropy(count_y)
v_xy = max(h_x + h_y - 2.0 * i_xy, 0.0)
if norm:
h_xy = max(h_x + h_y - i_xy, 1e-8)
return v_xy / h_xy
return v_xy
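# Minimal usage sketch (illustrative, not part of the original module). The variable
# names below are examples only; `norm=True` returns the normalized variation of
# information in [0, 1], where smaller values indicate more shared information:
#
#   rng = np.random.default_rng(0)
#   x = rng.normal(size=1000)
#   y = x + rng.normal(size=1000)
#   v = var_info(x, y, norm=True)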
| 27.230769
| 70
| 0.6629
|
de7995d52e07fbbcdaadec89aa51fc87a77616e6
| 2,829
|
py
|
Python
|
_2020.py
|
LariUnkari/advent-of-code-2020
|
426142a6911d0f186967db9aa84432ceaf8c7bf0
|
[
"CC0-1.0"
] | null | null | null |
_2020.py
|
LariUnkari/advent-of-code-2020
|
426142a6911d0f186967db9aa84432ceaf8c7bf0
|
[
"CC0-1.0"
] | null | null | null |
_2020.py
|
LariUnkari/advent-of-code-2020
|
426142a6911d0f186967db9aa84432ceaf8c7bf0
|
[
"CC0-1.0"
] | null | null | null |
"""Advent of Code 2020 Solutions
Author: Lari Unkari
"""
#Definitions
import os, importlib, modules.userInput
DAY_COUNT = len([f for f in os.listdir("days/") if f.startswith("day")])
def get_day_input():
"""Takes in user input for day choice"""
print(f"Select day (1-{DAY_COUNT:d}), then press enter.\n"+
"Give an empty input or 'exit' to end program\n")
return input("Choose the day: ")
def get_program_and_input(input_string):
"""Returns a day solution program and input as tuple (module, input_file). If invalid, returns (None, None)"""
mod = None
modName = "day{0:02d}"
filepath = "data/input{0:02d}.txt"
try:
value = int(input_string)
if value < 1:
print(f"Invalid day value {value} given!\n")
return (None, None)
elif value > DAY_COUNT:
print(f"Day {value} has not been reached yet!\n")
return (None, None)
else:
day = modName.format(value)
print(f"Day {value} given, importing {day}")
mod = importlib.import_module("."+day, package='days')
except ValueError:
print(f"Invalid input {input_string} given!")
return (None, None)
return (mod, open(filepath.format(value), "r"))
#Program
USER_INPUT = "0"
print("Advent of Code 2020 by Lari Unkari\n\n")
while True:
USER_INPUT = get_day_input()
if len(USER_INPUT) == 0 or USER_INPUT.strip() == "exit":
break
params = get_program_and_input(USER_INPUT)
    if params is not None and params[0] is not None:
        mod = params[0]
        if mod is None:
            print(f"No module found for {USER_INPUT}")
            break
        input_file = params[1]
        if input_file is None:
            print(f"No input file found for {USER_INPUT}")
            break
part_input = modules.userInput.get_int_input_constrained("\nWhich part to run? 1-2 (defaults to 2): ", 1, 2, 2)
#Input is a Tuple of (was_parse_success, list_of_int_values)
program_input = modules.userInput.get_int_list_input("\nProgram input: ",
"Invalid input {0}, try again or press enter without input to exit!")
if not program_input[0]:
break
        input_length = len(program_input[1])
if input_length > 0:
print(f"Input value list[0-{input_length-1}]: {program_input[1]}")
else:
print("No input given")
log_level_input = modules.userInput.get_int_input("\nLog level (defaults to level zero): ", None)
print("\n\n************************\n")
mod.play(params[1], part_input[1], program_input[1], log_level_input[1] if log_level_input[0] else 0)
print(f"\nModule {mod.__name__} program ended\n\n")
print("Goodbye and Merry Christmas 2020!")
| 30.75
| 119
| 0.604454
|
5fa59caac11b5de7bac8da330cf15072ff6e7cca
| 22,649
|
py
|
Python
|
contrib/devtools/copyright_header.py
|
suprnurd/MicroPaymentCoin
|
a3cd265a866b8213d6150e3a8848f2b3adc35512
|
[
"MIT"
] | null | null | null |
contrib/devtools/copyright_header.py
|
suprnurd/MicroPaymentCoin
|
a3cd265a866b8213d6150e3a8848f2b3adc35512
|
[
"MIT"
] | null | null | null |
contrib/devtools/copyright_header.py
|
suprnurd/MicroPaymentCoin
|
a3cd265a866b8213d6150e3a8848f2b3adc35512
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Copyright (c) 2018-2019 The PIVX developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import re
import fnmatch
import sys
import subprocess
import datetime
import os
################################################################################
# file filtering
################################################################################
EXCLUDE = [
# auto generated:
'src/qt/micropaymentcoinstrings.cpp',
'src/chainparamsseeds.h',
# other external copyrights:
'src/tinyformat.h',
'src/crypto/scrypt.cpp',
'test/functional/test_framework/bignum.py',
# python init:
'*__init__.py',
]
EXCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in EXCLUDE]))
EXCLUDE_DIRS = [
# git subtrees
"src/crypto/ctaes/",
"src/leveldb/",
"src/secp256k1/",
"src/univalue/",
]
INCLUDE = ['*.h', '*.cpp', '*.cc', '*.c', '*.py']
INCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in INCLUDE]))
def applies_to_file(filename):
for excluded_dir in EXCLUDE_DIRS:
if filename.startswith(excluded_dir):
return False
return ((EXCLUDE_COMPILED.match(filename) is None) and
(INCLUDE_COMPILED.match(filename) is not None))
################################################################################
# obtain list of files in repo according to INCLUDE and EXCLUDE
################################################################################
GIT_LS_CMD = 'git ls-files --full-name'.split(' ')
GIT_TOPLEVEL_CMD = 'git rev-parse --show-toplevel'.split(' ')
def call_git_ls(base_directory):
out = subprocess.check_output([*GIT_LS_CMD, base_directory])
return [f for f in out.decode("utf-8").split('\n') if f != '']
def call_git_toplevel():
"Returns the absolute path to the project root"
return subprocess.check_output(GIT_TOPLEVEL_CMD).strip().decode("utf-8")
def get_filenames_to_examine(base_directory):
"Returns an array of absolute paths to any project files in the base_directory that pass the include/exclude filters"
root = call_git_toplevel()
filenames = call_git_ls(base_directory)
return sorted([os.path.join(root, filename) for filename in filenames if
applies_to_file(filename)])
################################################################################
# define and compile regexes for the patterns we are looking for
################################################################################
COPYRIGHT_WITH_C = 'Copyright \(c\)'
COPYRIGHT_WITHOUT_C = 'Copyright'
ANY_COPYRIGHT_STYLE = '(%s|%s)' % (COPYRIGHT_WITH_C, COPYRIGHT_WITHOUT_C)
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
YEAR_LIST = '(%s)(, %s)+' % (YEAR, YEAR)
ANY_YEAR_STYLE = '(%s|%s)' % (YEAR_RANGE, YEAR_LIST)
ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = ("%s %s" % (ANY_COPYRIGHT_STYLE,
ANY_YEAR_STYLE))
ANY_COPYRIGHT_COMPILED = re.compile(ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE)
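# For reference (illustrative, not exhaustive): the pattern compiled above matches
# strings such as "Copyright (c) 2009-2018", "Copyright 2016" and
# "Copyright (c) 2014, 2016, 2018".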
def compile_copyright_regex(copyright_style, year_style, name):
return re.compile('%s %s,? %s' % (copyright_style, year_style, name))
EXPECTED_HOLDER_NAMES = [
"Satoshi Nakamoto\n",
"The Bitcoin Core developers\n",
"The Bitcoin Core developers \n",
"Bitcoin Core Developers\n",
"the Bitcoin Core developers\n",
"The Bitcoin developers\n",
"The LevelDB Authors\. All rights reserved\.\n",
"BitPay Inc\.\n",
"BitPay, Inc\.\n",
"University of Illinois at Urbana-Champaign\.\n",
"MarcoFalke\n",
"Pieter Wuille\n",
"Pieter Wuille +\*\n",
"Pieter Wuille, Gregory Maxwell +\*\n",
"Pieter Wuille, Andrew Poelstra +\*\n",
"Ian Miers, Christina Garman and Matthew Green\n",
"Andrew Poelstra +\*\n",
"Wladimir J. van der Laan\n",
"Jeff Garzik\n",
"Diederik Huys, Pieter Wuille +\*\n",
"Thomas Daede, Cory Fields +\*\n",
"Jan-Klaas Kollhof\n",
"Sam Rushing\n",
"ArtForz -- public domain half-a-node\n",
" Projet RNRT SAPHIR\n",
"The Zcash developers\n",
"The Dash developers\n",
"The Dash Developers\n",
"The Dash Core developers\n",
"The MicroPaymentCoin developers\n",
"The PPCoin developers\n",
]
DOMINANT_STYLE_COMPILED = {}
YEAR_LIST_STYLE_COMPILED = {}
WITHOUT_C_STYLE_COMPILED = {}
for holder_name in EXPECTED_HOLDER_NAMES:
DOMINANT_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_RANGE, holder_name))
YEAR_LIST_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_LIST, holder_name))
WITHOUT_C_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITHOUT_C, ANY_YEAR_STYLE,
holder_name))
################################################################################
# search file contents for copyright message of particular category
################################################################################
def get_count_of_copyrights_of_any_style_any_holder(contents):
return len(ANY_COPYRIGHT_COMPILED.findall(contents))
def file_has_dominant_style_copyright_for_holder(contents, holder_name):
match = DOMINANT_STYLE_COMPILED[holder_name].search(contents)
return match is not None
def file_has_year_list_style_copyright_for_holder(contents, holder_name):
match = YEAR_LIST_STYLE_COMPILED[holder_name].search(contents)
return match is not None
def file_has_without_c_style_copyright_for_holder(contents, holder_name):
match = WITHOUT_C_STYLE_COMPILED[holder_name].search(contents)
return match is not None
################################################################################
# get file info
################################################################################
def read_file(filename):
return open(filename, 'r', encoding="utf8").read()
def gather_file_info(filename):
info = {}
info['filename'] = filename
c = read_file(filename)
info['contents'] = c
info['all_copyrights'] = get_count_of_copyrights_of_any_style_any_holder(c)
info['classified_copyrights'] = 0
info['dominant_style'] = {}
info['year_list_style'] = {}
info['without_c_style'] = {}
for holder_name in EXPECTED_HOLDER_NAMES:
has_dominant_style = (
file_has_dominant_style_copyright_for_holder(c, holder_name))
has_year_list_style = (
file_has_year_list_style_copyright_for_holder(c, holder_name))
has_without_c_style = (
file_has_without_c_style_copyright_for_holder(c, holder_name))
info['dominant_style'][holder_name] = has_dominant_style
info['year_list_style'][holder_name] = has_year_list_style
info['without_c_style'][holder_name] = has_without_c_style
if has_dominant_style or has_year_list_style or has_without_c_style:
info['classified_copyrights'] = info['classified_copyrights'] + 1
return info
################################################################################
# report execution
################################################################################
SEPARATOR = '-'.join(['' for _ in range(80)])
def print_filenames(filenames, verbose):
if not verbose:
return
for filename in filenames:
print("\t%s" % filename)
def print_report(file_infos, verbose):
print(SEPARATOR)
examined = [i['filename'] for i in file_infos]
print("%d files examined according to INCLUDE and EXCLUDE fnmatch rules" %
len(examined))
print_filenames(examined, verbose)
print(SEPARATOR)
print('')
zero_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 0]
print("%4d with zero copyrights" % len(zero_copyrights))
print_filenames(zero_copyrights, verbose)
one_copyright = [i['filename'] for i in file_infos if
i['all_copyrights'] == 1]
print("%4d with one copyright" % len(one_copyright))
print_filenames(one_copyright, verbose)
two_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 2]
print("%4d with two copyrights" % len(two_copyrights))
print_filenames(two_copyrights, verbose)
three_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 3]
print("%4d with three copyrights" % len(three_copyrights))
print_filenames(three_copyrights, verbose)
four_or_more_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] >= 4]
print("%4d with four or more copyrights" % len(four_or_more_copyrights))
print_filenames(four_or_more_copyrights, verbose)
print('')
print(SEPARATOR)
print('Copyrights with dominant style:\ne.g. "Copyright (c)" and '
'"<year>" or "<startYear>-<endYear>":\n')
for holder_name in EXPECTED_HOLDER_NAMES:
dominant_style = [i['filename'] for i in file_infos if
i['dominant_style'][holder_name]]
if len(dominant_style) > 0:
print("%4d with '%s'" % (len(dominant_style),
holder_name.replace('\n', '\\n')))
print_filenames(dominant_style, verbose)
print('')
print(SEPARATOR)
print('Copyrights with year list style:\ne.g. "Copyright (c)" and '
'"<year1>, <year2>, ...":\n')
for holder_name in EXPECTED_HOLDER_NAMES:
year_list_style = [i['filename'] for i in file_infos if
i['year_list_style'][holder_name]]
if len(year_list_style) > 0:
print("%4d with '%s'" % (len(year_list_style),
holder_name.replace('\n', '\\n')))
print_filenames(year_list_style, verbose)
print('')
print(SEPARATOR)
print('Copyrights with no "(c)" style:\ne.g. "Copyright" and "<year>" or '
'"<startYear>-<endYear>":\n')
for holder_name in EXPECTED_HOLDER_NAMES:
without_c_style = [i['filename'] for i in file_infos if
i['without_c_style'][holder_name]]
if len(without_c_style) > 0:
print("%4d with '%s'" % (len(without_c_style),
holder_name.replace('\n', '\\n')))
print_filenames(without_c_style, verbose)
print('')
print(SEPARATOR)
unclassified_copyrights = [i['filename'] for i in file_infos if
i['classified_copyrights'] < i['all_copyrights']]
print("%d with unexpected copyright holder names" %
len(unclassified_copyrights))
print_filenames(unclassified_copyrights, verbose)
print(SEPARATOR)
def exec_report(base_directory, verbose):
filenames = get_filenames_to_examine(base_directory)
file_infos = [gather_file_info(f) for f in filenames]
print_report(file_infos, verbose)
################################################################################
# report cmd
################################################################################
REPORT_USAGE = """
Produces a report of all copyright header notices found inside the source files
of a repository.
Usage:
$ ./copyright_header.py report <base_directory> [verbose]
Arguments:
<base_directory> - The base directory of a bitcoin source code repository.
[verbose] - Includes a list of every file of each subcategory in the report.
"""
def report_cmd(argv):
if len(argv) == 2:
sys.exit(REPORT_USAGE)
base_directory = argv[2]
if not os.path.exists(base_directory):
sys.exit("*** bad <base_directory>: %s" % base_directory)
if len(argv) == 3:
verbose = False
elif argv[3] == 'verbose':
verbose = True
else:
        sys.exit("*** unknown argument: %s" % argv[3])
exec_report(base_directory, verbose)
################################################################################
# query git for year of last change
################################################################################
GIT_LOG_CMD = "git log --pretty=format:%%ai %s"
def call_git_log(filename):
out = subprocess.check_output((GIT_LOG_CMD % filename).split(' '))
return out.decode("utf-8").split('\n')
def get_git_change_years(filename):
git_log_lines = call_git_log(filename)
if len(git_log_lines) == 0:
return [datetime.date.today().year]
# timestamp is in ISO 8601 format. e.g. "2016-09-05 14:25:32 -0600"
return [line.split(' ')[0].split('-')[0] for line in git_log_lines]
def get_most_recent_git_change_year(filename):
return max(get_git_change_years(filename))
################################################################################
# read and write to file
################################################################################
def read_file_lines(filename):
f = open(filename, 'r', encoding="utf8")
file_lines = f.readlines()
f.close()
return file_lines
def write_file_lines(filename, file_lines):
f = open(filename, 'w', encoding="utf8")
f.write(''.join(file_lines))
f.close()
################################################################################
# update header years execution
################################################################################
COPYRIGHT = 'Copyright \(c\)'
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
HOLDER = 'The MicroPaymentCoin developers'
UPDATEABLE_LINE_COMPILED = re.compile(' '.join([COPYRIGHT, YEAR_RANGE, HOLDER]))
def get_updatable_copyright_line(file_lines):
index = 0
for line in file_lines:
if UPDATEABLE_LINE_COMPILED.search(line) is not None:
return index, line
index = index + 1
return None, None
def parse_year_range(year_range):
year_split = year_range.split('-')
start_year = year_split[0]
if len(year_split) == 1:
return start_year, start_year
return start_year, year_split[1]
def year_range_to_str(start_year, end_year):
if start_year == end_year:
return start_year
return "%s-%s" % (start_year, end_year)
def create_updated_copyright_line(line, last_git_change_year):
copyright_splitter = 'Copyright (c) '
copyright_split = line.split(copyright_splitter)
# Preserve characters on line that are ahead of the start of the copyright
# notice - they are part of the comment block and vary from file-to-file.
before_copyright = copyright_split[0]
after_copyright = copyright_split[1]
space_split = after_copyright.split(' ')
year_range = space_split[0]
start_year, end_year = parse_year_range(year_range)
if end_year == last_git_change_year:
return line
return (before_copyright + copyright_splitter +
year_range_to_str(start_year, last_git_change_year) + ' ' +
' '.join(space_split[1:]))
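# Illustrative example (not part of the original script): given the line
#   "// Copyright (c) 2014-2016 The MicroPaymentCoin developers\n"
# and a last git change year of "2018", create_updated_copyright_line() returns
#   "// Copyright (c) 2014-2018 The MicroPaymentCoin developers\n"
# whereas a line already ending in the latest change year is returned unchanged.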
def update_updatable_copyright(filename):
file_lines = read_file_lines(filename)
index, line = get_updatable_copyright_line(file_lines)
if not line:
print_file_action_message(filename, "No updatable copyright.")
return
last_git_change_year = get_most_recent_git_change_year(filename)
new_line = create_updated_copyright_line(line, last_git_change_year)
if line == new_line:
print_file_action_message(filename, "Copyright up-to-date.")
return
file_lines[index] = new_line
write_file_lines(filename, file_lines)
print_file_action_message(filename,
"Copyright updated! -> %s" % last_git_change_year)
def exec_update_header_year(base_directory):
for filename in get_filenames_to_examine(base_directory):
update_updatable_copyright(filename)
################################################################################
# update cmd
################################################################################
UPDATE_USAGE = """
Updates all the copyright headers of "The MicroPaymentCoin developers" which were
changed in a year more recent than is listed. For example:
// Copyright (c) <firstYear>-<lastYear> The MicroPaymentCoin developers
will be updated to:
// Copyright (c) <firstYear>-<lastModifiedYear> The MicroPaymentCoin developers
where <lastModifiedYear> is obtained from the 'git log' history.
This subcommand also handles copyright headers that have only a single year. In those cases:
// Copyright (c) <year> The MicroPaymentCoin developers
will be updated to:
// Copyright (c) <year>-<lastModifiedYear> The MicroPaymentCoin developers
where the update is appropriate.
Usage:
$ ./copyright_header.py update <base_directory>
Arguments:
<base_directory> - The base directory of a micropaymentcoin source code repository.
"""
def print_file_action_message(filename, action):
print("%-52s %s" % (filename, action))
def update_cmd(argv):
if len(argv) != 3:
sys.exit(UPDATE_USAGE)
base_directory = argv[2]
if not os.path.exists(base_directory):
sys.exit("*** bad base_directory: %s" % base_directory)
exec_update_header_year(base_directory)
################################################################################
# inserted copyright header format
################################################################################
def get_header_lines(header, start_year, end_year):
lines = header.split('\n')[1:-1]
lines[0] = lines[0] % year_range_to_str(start_year, end_year)
return [line + '\n' for line in lines]
CPP_HEADER = '''
// Copyright (c) %s The PIVX developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
def get_cpp_header_lines_to_insert(start_year, end_year):
return reversed(get_header_lines(CPP_HEADER, start_year, end_year))
PYTHON_HEADER = '''
# Copyright (c) %s The PIVX developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
def get_python_header_lines_to_insert(start_year, end_year):
return reversed(get_header_lines(PYTHON_HEADER, start_year, end_year))
################################################################################
# query git for year of last change
################################################################################
def get_git_change_year_range(filename):
years = get_git_change_years(filename)
return min(years), max(years)
################################################################################
# check for existing core copyright
################################################################################
def file_already_has_core_copyright(file_lines):
index, _ = get_updatable_copyright_line(file_lines)
return index is not None
################################################################################
# insert header execution
################################################################################
def file_has_hashbang(file_lines):
if len(file_lines) < 1:
return False
if len(file_lines[0]) <= 2:
return False
return file_lines[0][:2] == '#!'
def insert_python_header(filename, file_lines, start_year, end_year):
if file_has_hashbang(file_lines):
insert_idx = 1
else:
insert_idx = 0
header_lines = get_python_header_lines_to_insert(start_year, end_year)
for line in header_lines:
file_lines.insert(insert_idx, line)
write_file_lines(filename, file_lines)
def insert_cpp_header(filename, file_lines, start_year, end_year):
header_lines = get_cpp_header_lines_to_insert(start_year, end_year)
for line in header_lines:
file_lines.insert(0, line)
write_file_lines(filename, file_lines)
def exec_insert_header(filename, style):
file_lines = read_file_lines(filename)
if file_already_has_core_copyright(file_lines):
sys.exit('*** %s already has a copyright by The PIVX developers'
% (filename))
start_year, end_year = get_git_change_year_range(filename)
if style == 'python':
insert_python_header(filename, file_lines, start_year, end_year)
else:
insert_cpp_header(filename, file_lines, start_year, end_year)
################################################################################
# insert cmd
################################################################################
INSERT_USAGE = """
Inserts a copyright header for "The PIVX developers" at the top of the
file in either Python or C++ style as determined by the file extension. If the
file is a Python file and it has a '#!' starting the first line, the header is
inserted in the line below it.
The copyright dates will be set to be:
"<year_introduced>-<current_year>"
where <year_introduced> is according to the 'git log' history. If
<year_introduced> is equal to <current_year>, the date will be set to be:
"<current_year>"
If the file already has a copyright for "The PIVX developers", the
script will exit.
Usage:
$ ./copyright_header.py insert <file>
Arguments:
<file> - A source file in the bitcoin repository.
"""
def insert_cmd(argv):
if len(argv) != 3:
sys.exit(INSERT_USAGE)
filename = argv[2]
if not os.path.isfile(filename):
sys.exit("*** bad filename: %s" % filename)
_, extension = os.path.splitext(filename)
if extension not in ['.h', '.cpp', '.cc', '.c', '.py']:
sys.exit("*** cannot insert for file extension %s" % extension)
if extension == '.py':
style = 'python'
else:
style = 'cpp'
exec_insert_header(filename, style)
################################################################################
# UI
################################################################################
USAGE = """
copyright_header.py - utilities for managing copyright headers of 'The Bitcoin
Core developers' in repository source files.
Usage:
$ ./copyright_header <subcommand>
Subcommands:
report
update
insert
To see subcommand usage, run them without arguments.
"""
SUBCOMMANDS = ['report', 'update', 'insert']
if __name__ == "__main__":
if len(sys.argv) == 1:
sys.exit(USAGE)
subcommand = sys.argv[1]
if subcommand not in SUBCOMMANDS:
sys.exit(USAGE)
if subcommand == 'report':
report_cmd(sys.argv)
elif subcommand == 'update':
update_cmd(sys.argv)
elif subcommand == 'insert':
insert_cmd(sys.argv)
| 36.47182
| 121
| 0.601484
|
5bbabfddc5fd8e9ed9f1043bc202c63580744030
| 14,278
|
py
|
Python
|
astropy/io/misc/tests/test_hdf5.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | 3
|
2018-03-20T15:09:16.000Z
|
2021-05-27T11:17:33.000Z
|
astropy/io/misc/tests/test_hdf5.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | null | null | null |
astropy/io/misc/tests/test_hdf5.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
import pytest
import numpy as np
from ....tests.helper import catch_warnings
from ....table import Table, Column
try:
import h5py
except ImportError:
HAS_H5PY = False
else:
HAS_H5PY = True
try:
import yaml
except ImportError:
HAS_YAML = False
else:
HAS_YAML = True
ALL_DTYPES = [np.uint8, np.uint16, np.uint32, np.uint64, np.int8,
np.int16, np.int32, np.int64, np.float32, np.float64,
np.bool, '|S3']
def _default_values(dtype):
if dtype == np.bool:
return [0, 1, 1]
elif dtype == '|S3':
return [b'abc', b'def', b'ghi']
else:
return [1, 2, 3]
@pytest.mark.skipif('not HAS_H5PY')
def test_write_nopath(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
with pytest.raises(ValueError) as exc:
t1.write(test_file)
assert exc.value.args[0] == "table path should be set via the path= argument"
@pytest.mark.skipif('not HAS_H5PY')
def test_read_notable_nopath(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
h5py.File(test_file, 'w').close() # create empty file
with pytest.raises(ValueError) as exc:
t1 = Table.read(test_file, path='/', format='hdf5')
assert exc.value.args[0] == 'no table found in HDF5 group /'
@pytest.mark.skipif('not HAS_H5PY')
def test_read_nopath(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
t2 = Table.read(test_file)
assert np.all(t1['a'] == t2['a'])
@pytest.mark.skipif('not HAS_H5PY')
def test_write_invalid_path(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
with pytest.raises(ValueError) as exc:
t1.write(test_file, path='test/')
assert exc.value.args[0] == "table path should end with table name, not /"
@pytest.mark.skipif('not HAS_H5PY')
def test_read_invalid_path(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
with pytest.raises(IOError) as exc:
Table.read(test_file, path='test/')
assert exc.value.args[0] == "Path test/ does not exist"
@pytest.mark.skipif('not HAS_H5PY')
def test_read_missing_group(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
h5py.File(test_file, 'w').close() # create empty file
with pytest.raises(IOError) as exc:
Table.read(test_file, path='test/path/table')
assert exc.value.args[0] == "Path test/path/table does not exist"
@pytest.mark.skipif('not HAS_H5PY')
def test_read_missing_table(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
with h5py.File(test_file, 'w') as f:
f.create_group('test').create_group('path')
with pytest.raises(IOError) as exc:
Table.read(test_file, path='test/path/table')
assert exc.value.args[0] == "Path test/path/table does not exist"
@pytest.mark.skipif('not HAS_H5PY')
def test_read_missing_group_fileobj(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
with h5py.File(test_file, 'w') as f:
with pytest.raises(IOError) as exc:
Table.read(f, path='test/path/table')
assert exc.value.args[0] == "Path test/path/table does not exist"
@pytest.mark.skipif('not HAS_H5PY')
def test_read_write_simple(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
t2 = Table.read(test_file, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_write_existing_table(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
with pytest.raises(IOError) as exc:
t1.write(test_file, path='the_table', append=True)
assert exc.value.args[0] == "Table the_table already exists"
@pytest.mark.skipif('not HAS_H5PY')
def test_read_write_memory(tmpdir):
with h5py.File('test', 'w', driver='core', backing_store=False) as output_file:
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(output_file, path='the_table')
t2 = Table.read(output_file, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_write_existing(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
h5py.File(test_file, 'w').close() # create empty file
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
with pytest.raises(IOError) as exc:
t1.write(test_file, path='the_table')
assert exc.value.args[0].startswith("File exists:")
@pytest.mark.skipif('not HAS_H5PY')
def test_read_write_existing_overwrite(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
h5py.File(test_file, 'w').close() # create empty file
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table', overwrite=True)
t2 = Table.read(test_file, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_write_existing_append(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
h5py.File(test_file, 'w').close() # create empty file
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table_1', append=True)
t1.write(test_file, path='the_table_2', append=True)
t2 = Table.read(test_file, path='the_table_1')
assert np.all(t2['a'] == [1, 2, 3])
t3 = Table.read(test_file, path='the_table_2')
assert np.all(t3['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_write_existing_append_groups(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
with h5py.File(test_file, 'w') as f:
f.create_group('test_1')
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='test_1/the_table_1', append=True)
t1.write(test_file, path='test_2/the_table_2', append=True)
t2 = Table.read(test_file, path='test_1/the_table_1')
assert np.all(t2['a'] == [1, 2, 3])
t3 = Table.read(test_file, path='test_2/the_table_2')
assert np.all(t3['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_write_existing_append_overwrite(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='table1')
t1.write(test_file, path='table2', append=True)
t1v2 = Table()
t1v2.add_column(Column(name='a', data=[4, 5, 6]))
with pytest.raises(IOError) as exc:
t1v2.write(test_file, path='table1', append=True)
assert exc.value.args[0] == 'Table table1 already exists'
t1v2.write(test_file, path='table1', append=True, overwrite=True)
t2 = Table.read(test_file, path='table1')
assert np.all(t2['a'] == [4, 5, 6])
t3 = Table.read(test_file, path='table2')
assert np.all(t3['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_fileobj(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
import h5py
with h5py.File(test_file, 'r') as input_file:
t2 = Table.read(input_file, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_filobj_path(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='path/to/data/the_table')
import h5py
with h5py.File(test_file, 'r') as input_file:
t2 = Table.read(input_file, path='path/to/data/the_table')
assert np.all(t2['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_filobj_group_path(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='path/to/data/the_table')
import h5py
with h5py.File(test_file, 'r') as input_file:
t2 = Table.read(input_file['path/to'], path='data/the_table')
assert np.all(t2['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_read_wrong_fileobj():
class FakeFile(object):
def read(self):
pass
f = FakeFile()
with pytest.raises(TypeError) as exc:
t1 = Table.read(f, format='hdf5')
assert exc.value.args[0] == 'h5py can only open regular files'
@pytest.mark.skipif('not HAS_H5PY')
def test_write_fileobj(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
import h5py
with h5py.File(test_file, 'w') as output_file:
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(output_file, path='the_table')
t2 = Table.read(test_file, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_write_filobj_group(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
import h5py
with h5py.File(test_file, 'w') as output_file:
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(output_file, path='path/to/data/the_table')
t2 = Table.read(test_file, path='path/to/data/the_table')
assert np.all(t2['a'] == [1, 2, 3])
@pytest.mark.skipif('not HAS_H5PY')
def test_write_wrong_type():
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
with pytest.raises(TypeError) as exc:
t1.write(1212, path='path/to/data/the_table', format='hdf5')
assert exc.value.args[0] == ('output should be a string '
'or an h5py File or Group object')
@pytest.mark.skipif('not HAS_H5PY')
@pytest.mark.parametrize(('dtype'), ALL_DTYPES)
def test_preserve_single_dtypes(tmpdir, dtype):
test_file = str(tmpdir.join('test.hdf5'))
values = _default_values(dtype)
t1 = Table()
t1.add_column(Column(name='a', data=np.array(values, dtype=dtype)))
t1.write(test_file, path='the_table')
t2 = Table.read(test_file, path='the_table')
assert np.all(t2['a'] == values)
assert t2['a'].dtype == dtype
@pytest.mark.skipif('not HAS_H5PY')
def test_preserve_all_dtypes(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
for dtype in ALL_DTYPES:
values = _default_values(dtype)
t1.add_column(Column(name=str(dtype), data=np.array(values, dtype=dtype)))
t1.write(test_file, path='the_table')
t2 = Table.read(test_file, path='the_table')
for dtype in ALL_DTYPES:
values = _default_values(dtype)
assert np.all(t2[str(dtype)] == values)
assert t2[str(dtype)].dtype == dtype
@pytest.mark.skipif('not HAS_H5PY')
def test_preserve_meta(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.meta['a'] = 1
t1.meta['b'] = 'hello'
t1.meta['c'] = 3.14159
t1.meta['d'] = True
t1.meta['e'] = np.array([1, 2, 3])
t1.write(test_file, path='the_table')
t2 = Table.read(test_file, path='the_table')
for key in t1.meta:
assert np.all(t1.meta[key] == t2.meta[key])
@pytest.mark.skipif('not HAS_H5PY or not HAS_YAML')
def test_preserve_serialized(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1['a'] = Column(data=[1, 2, 3], unit="s")
t1['a'].meta['a0'] = "A0"
t1['a'].meta['a1'] = {"a1": [0, 1]}
t1['a'].format = '7.3f'
t1['a'].description = 'A column'
t1.meta['b'] = 1
t1.meta['c'] = {"c0": [0, 1]}
t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True)
t2 = Table.read(test_file, path='the_table')
assert t1['a'].unit == t2['a'].unit
assert t1['a'].format == t2['a'].format
assert t1['a'].description == t2['a'].description
assert t1['a'].meta == t2['a'].meta
assert t1.meta == t2.meta
@pytest.mark.skipif('not HAS_H5PY or not HAS_YAML')
def test_metadata_too_large(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1['a'] = Column(data=[1, 2, 3])
t1.meta["meta"] = "0" * (2**16 + 1)
with catch_warnings() as w:
t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True)
assert len(w) == 1
assert str(w[0].message).startswith(
"Attributes could not be written to the output HDF5 "
"file: Unable to create attribute ")
assert "bject header message is too large" in str(w[0].message)
@pytest.mark.skipif('not HAS_H5PY')
def test_skip_meta(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.meta['a'] = 1
t1.meta['b'] = 'hello'
t1.meta['c'] = 3.14159
t1.meta['d'] = True
t1.meta['e'] = np.array([1, 2, 3])
t1.meta['f'] = str
with catch_warnings() as w:
t1.write(test_file, path='the_table')
assert len(w) == 1
assert str(w[0].message).startswith(
"Attribute `f` of type {0} cannot be written to HDF5 files - skipping".format(type(t1.meta['f'])))
@pytest.mark.skipif('not HAS_H5PY')
def test_read_h5py_objects(tmpdir):
# Regression test - ensure that Datasets are recognized automatically
test_file = str(tmpdir.join('test.hdf5'))
import h5py
with h5py.File(test_file, 'w') as output_file:
t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(output_file, path='the_table')
f = h5py.File(test_file)
t2 = Table.read(f, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
t3 = Table.read(f['/'], path='the_table')
assert np.all(t3['a'] == [1, 2, 3])
t4 = Table.read(f['the_table'])
assert np.all(t4['a'] == [1, 2, 3])
f.close() # don't raise an error in 'test --open-files'
| 30.573876
| 106
| 0.635873
|
3cf5cf4944c82dc1e2013c870032327044ea5169
| 4,976
|
py
|
Python
|
PyTorch/Resources/Examples/01_nn_classification.py
|
methylDragon/python-data-tools-reference
|
e965720aba05c326f5cad3c864ba820ad299533a
|
[
"MIT"
] | 9
|
2019-03-03T06:47:02.000Z
|
2021-12-08T18:05:12.000Z
|
PyTorch/Resources/Examples/01_nn_classification.py
|
methylDragon/python-data-tools-reference
|
e965720aba05c326f5cad3c864ba820ad299533a
|
[
"MIT"
] | null | null | null |
PyTorch/Resources/Examples/01_nn_classification.py
|
methylDragon/python-data-tools-reference
|
e965720aba05c326f5cad3c864ba820ad299533a
|
[
"MIT"
] | 4
|
2019-03-11T00:12:03.000Z
|
2021-01-09T00:14:37.000Z
|
# Modified from SUTD
# Classification with FashionMNIST
import torchvision
from torchvision.datasets import FashionMNIST
from torch.utils.data import DataLoader
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
# True if a CUDA-capable GPU is available; used below to move the model and data to the GPU
using_GPU = torch.cuda.is_available()
# MODEL ========================================================================
class FeedForwardNN(nn.Module):
def __init__(self, input_size, num_classes, num_hidden, hidden_dim, dropout):
super(FeedForwardNN, self).__init__()
assert num_hidden > 0
# Dropout and Activation
self.dropout = nn.Dropout(dropout)
self.nonlinearity = nn.ReLU()
# Hidden layers
self.hidden_layers = nn.ModuleList([])
self.hidden_layers.append(nn.Linear(input_size, hidden_dim))
for i in range(num_hidden - 1):
self.hidden_layers.append(nn.Linear(hidden_dim, hidden_dim))
# Output
self.output_projection = nn.Linear(hidden_dim, num_classes)
# input is of shape (batch_size, input_size)
def forward(self, x):
for hidden_layer in self.hidden_layers:
x = hidden_layer(x) # Apply hidden layer
x = self.dropout(x) # Apply dropout
            x = self.nonlinearity(x)      # Apply non-linearity
out = self.output_projection(x) # Map output
# Softmax output to map to log-probability distribution over classes for each example
out_distribution = F.log_softmax(out, dim=-1)
return out_distribution
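# Shape-check sketch (illustrative; assumes the hyperparameters used in __main__ below):
#   model = FeedForwardNN(input_size=784, num_classes=10, num_hidden=2, hidden_dim=512, dropout=0.2)
#   log_probs = model(torch.randn(64, 784))  # -> shape (64, 10), each row a log-probability distribution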
# EVALUATION ===================================================================
def evaluate_net(net, test_data):
net.eval() # Eval mode
num_correct, total_examples, total_test_loss = 0, 0, 0
for (test_images, test_labels) in test_data:
reshaped_test_images = test_images.view(-1, 784) # Reshape to fit model
if using_GPU:
reshaped_test_images = reshaped_test_images.cuda()
test_labels = test_labels.cuda()
        predicted = net(reshaped_test_images)  # Forward pass through the passed-in network
# Loss is averaged, multiply by batch size
total_test_loss += nll_criterion(predicted, test_labels) * test_labels.size(0)
_, predicted_labels = torch.max(predicted.data, 1) # Get predicted label
total_examples += test_labels.size(0)
num_correct += torch.sum(predicted_labels == test_labels)
accuracy = 100 * num_correct / total_examples
average_test_loss = total_test_loss / total_examples
print("Iteration {}. Test Loss {}. Test Accuracy {}.".format(
num_iter, average_test_loss, accuracy))
net.train() # Back to train mode
if __name__ == "__main__":
# LOAD DATA ================================================================
train_dataset = FashionMNIST(root='./torchvision-data',
train=True,
transform=torchvision.transforms.ToTensor(),
download=True)
test_dataset = FashionMNIST(root='./torchvision-data', train=False,
transform=torchvision.transforms.ToTensor())
batch_size = 64
# Dataloader automatically reshapes out data for us
# We went from dataset elements of shape (1, 28, 28) and labels of shape (1)
# To elements of (64, 1, 28, 28) and labels of shape (64) (batch size accounted for)
train_dataloader = DataLoader(
dataset=train_dataset, batch_size=batch_size, shuffle=True)
test_dataloader = DataLoader(
dataset=test_dataset, batch_size=batch_size)
## MODEL ##
fashionmnist_ffnn_clf = FeedForwardNN(input_size=784, num_classes=10,
num_hidden=2,
hidden_dim=512, dropout=0.2)
if using_GPU: # Shift to GPU if necessary
fashionmnist_ffnn_clf = fashionmnist_ffnn_clf.cuda()
print(next(fashionmnist_ffnn_clf.parameters()).is_cuda) # True if ok
nll_criterion = nn.NLLLoss() # Loss
ffnn_optimizer = optim.SGD(fashionmnist_ffnn_clf.parameters(), # Optimizer
lr=0.1, momentum=0.9)
# TRAIN ====================================================================
num_epochs = 10
num_iter = 0 # Counter for iters done
for epoch in range(num_epochs):
print("Starting epoch {}".format(epoch + 1))
for (images, labels) in train_dataloader:
reshaped_images = images.view(-1, 784) # Reshape to fit model
if using_GPU:
reshaped_images = reshaped_images.cuda()
labels = labels.cuda()
predicted = fashionmnist_ffnn_clf(reshaped_images)
batch_loss = nll_criterion(predicted, labels)
ffnn_optimizer.zero_grad()
batch_loss.backward()
ffnn_optimizer.step()
num_iter += 1 # Increment gradient update counter
# EVALUATE =============================================================
if num_iter % 500 == 0: # Evaluate every 500 gradient updates
evaluate_net(fashionmnist_ffnn_clf, test_dataloader)
| 35.798561
| 89
| 0.615957
|
582dcae9d30fb5d1fb0151568bd83d4fbf7564ba
| 3,560
|
py
|
Python
|
app/recipe/views.py
|
DDonts/Django-recipe-api
|
a5ba3ba90ab612ee40fb09a98c7bad66ca8f19e2
|
[
"MIT"
] | null | null | null |
app/recipe/views.py
|
DDonts/Django-recipe-api
|
a5ba3ba90ab612ee40fb09a98c7bad66ca8f19e2
|
[
"MIT"
] | null | null | null |
app/recipe/views.py
|
DDonts/Django-recipe-api
|
a5ba3ba90ab612ee40fb09a98c7bad66ca8f19e2
|
[
"MIT"
] | null | null | null |
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework import viewsets, mixins, status
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from core.models import Tag, Ingredient, Recipe
from recipe import serializers
class BaseRecipeAttrViewSet(
viewsets.GenericViewSet, mixins.ListModelMixin, mixins.CreateModelMixin
):
"""Base viewset for user owned recipe attributes"""
authentication_classes = (TokenAuthentication,)
permission_classes = (IsAuthenticated,)
def get_queryset(self):
"""Return objects for the current authenticated user only"""
assigned_only = bool(
int(self.request.query_params.get('assigned_only', 0))
)
queryset = self.queryset
if assigned_only:
queryset = queryset.filter(recipe__isnull=False)
return queryset.filter(
user=self.request.user
).order_by("-name").distinct()
def perform_create(self, serializer):
"""Create a new object"""
serializer.save(user=self.request.user)
class TagViewSet(BaseRecipeAttrViewSet):
"""Manage tags in the database"""
queryset = Tag.objects.all()
serializer_class = serializers.TagSerializer
class IngredientViewSet(BaseRecipeAttrViewSet):
"""Manage ingredients in database"""
queryset = Ingredient.objects.all()
serializer_class = serializers.IngredientSerializer
class RecipeViewSet(viewsets.ModelViewSet):
"""Manage recipes in the database"""
serializer_class = serializers.RecipeSerializer
queryset = Recipe.objects.all()
authentication_classes = (TokenAuthentication,)
permission_classes = (IsAuthenticated,)
def _params_to_ints(self, qs):
"""Convert a list of string IDs to a list of integers"""
return [int(str_id) for str_id in qs.split(',')]
def get_queryset(self):
"""Retrieve the recipes for the authenticated user"""
tags = self.request.query_params.get('tags')
ingredients = self.request.query_params.get('ingredients')
queryset = self.queryset
if tags:
tag_ids = self._params_to_ints(tags)
queryset = queryset.filter(tags__id__in=tag_ids)
if ingredients:
ingredient_ids = self._params_to_ints(ingredients)
queryset = queryset.filter(ingredients__id__in=ingredient_ids)
return queryset.filter(user=self.request.user)
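        # Example request (illustrative; the URL prefix depends on the project's URL conf):
        #   GET /recipes/?tags=1,3&ingredients=2
        # returns only the authenticated user's recipes that carry tag ID 1 or 3 and
        # contain ingredient ID 2.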
def get_serializer_class(self):
"""Return appropriate serializer class"""
if self.action == "retrieve":
return serializers.RecipeDetailSerializer
elif self.action == "upload_image":
return serializers.RecipeImageSerializer
return self.serializer_class
def perform_create(self, serializer):
"""Create a new recipe"""
serializer.save(user=self.request.user)
@action(methods=['POST'], detail=True, url_path='upload-image')
def upload_image(self, request, pk=None):
"""Upload an image to a recipe"""
recipe = self.get_object()
serializer = self.get_serializer(
recipe,
data=request.data
)
if serializer.is_valid():
serializer.save()
return Response(
serializer.data,
status=status.HTTP_200_OK
)
return Response(
serializer.errors,
status=status.HTTP_400_BAD_REQUEST
)
| 32.363636
| 75
| 0.674719
|
b0fd59a2a866ec7f71f3d3c75363d5cf102e7bff
| 4,673
|
py
|
Python
|
RaspberryPi/app.py
|
DreamN/Smart-Tollbooth
|
9f3af18a84cfd871ed1eaebcab1aea8b40dd2e91
|
[
"MIT"
] | null | null | null |
RaspberryPi/app.py
|
DreamN/Smart-Tollbooth
|
9f3af18a84cfd871ed1eaebcab1aea8b40dd2e91
|
[
"MIT"
] | null | null | null |
RaspberryPi/app.py
|
DreamN/Smart-Tollbooth
|
9f3af18a84cfd871ed1eaebcab1aea8b40dd2e91
|
[
"MIT"
] | null | null | null |
#################################################################
## SMART TOLLBOOTH PROJECT ##
#################################################################
#!/usr/bin/env python
# -*- coding: utf8 -*-
import RPi.GPIO as GPIO
import paho.mqtt.client as mqtt
from picamera import PiCamera
from threading import Thread
from MainFunc import main
import os
import requests
import MFRC522
import signal
import time
import servo
Buzzer = 7
GPIO.setmode(GPIO.BOARD)
GPIO.setup(Buzzer, GPIO.OUT)
continue_reading = True
CSERVER_URL = "https://smarttbcser.herokuapp.com/carComing"
# Ultrasonic
ECHO = 16
TRIG = 18
GPIO.setup(TRIG,GPIO.OUT)
GPIO.output(TRIG,0)
GPIO.setup(ECHO,GPIO.IN)
def on_subscribe(client, userdata, mid, granted_qos):
print("Subscribed: "+str(mid)+" "+str(granted_qos))
def on_message(client, userdata, msg):
print 'Response : accept_' + str(msg.payload)
acceptCar()
# MQTT configuration
client = mqtt.Client()
client.username_pw_set("tbrpi", "random")
client.on_subscribe = on_subscribe
client.on_message = on_message
client.connect('m13.cloudmqtt.com', 11675, 60)
client.subscribe("/CAR/RES")
client.loop_start()
camera = PiCamera()
camera.resolution = (800, 600)
def read_distance():
print 'Read Distance'
GPIO.output(TRIG,1)
time.sleep(0.00001)
GPIO.output(TRIG,0)
while GPIO.input(ECHO)== 0:
pass
start = time.time()
while GPIO.input(ECHO)== 1:
pass
stop = time.time()
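    # One-way distance in cm: round-trip echo time * speed of sound (~34000 cm/s) / 2 = time * 17000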
return (stop - start) * 17000
def acceptCar():
print 'Access Granted!!'
servo.openBarrier()
while(1):
if(read_distance()<=8):
print 'car\'s come'
time.sleep(0.8)
while(read_distance()<=8):
print 'car\'s here'
time.sleep(0.8)
servo.closeBarrier()
print 'done'
break
else:
print 'wait for car'
def TakePic():
takeTime = time.strftime("%d-%m-%Yh%Hm%Ms%S", time.localtime())
path = "/home/pi/PicturesIn/"+takeTime+".jpg"
print("TAKING PICTURE...")
camera.capture(path)
print("COMPLETE TAKE PICTURE!\n")
return takeTime+".jpg"
# Capture SIGINT for cleanup when the script is aborted
def end_read(signal,frame):
global continue_reading
print "Ctrl+C captured, ending read."
continue_reading = False
GPIO.cleanup()
def rfid():
while continue_reading:
time.sleep(0.5)
# Scan for cards
(status,TagType) = MIFAREReader.MFRC522_Request(MIFAREReader.PICC_REQIDL)
# If a card is found
if status == MIFAREReader.MI_OK:
print "Card detected"
# Get the UID of the card
(status,uid) = MIFAREReader.MFRC522_Anticoll()
# If we have the UID, continue
if status == MIFAREReader.MI_OK:
# Print UID
suid = str(uid[0])+","+str(uid[1])+","+str(uid[2])+","+str(uid[3])
print "Card read UID: " + suid
# This is the default key for authentication
key = [0xFF,0xFF,0xFF,0xFF,0xFF,0xFF]
# Select the scanned tag
MIFAREReader.MFRC522_SelectTag(uid)
# Authenticate
status = MIFAREReader.MFRC522_Auth(MIFAREReader.PICC_AUTHENT1A, 8, key, uid)
# Check if authenticated
if status == MIFAREReader.MI_OK:
MIFAREReader.MFRC522_Read(8)
MIFAREReader.MFRC522_StopCrypto1()
else:
print "Authentication error"
GPIO.output(Buzzer, GPIO.HIGH)
time.sleep(0.4)
GPIO.output(Buzzer, GPIO.LOW)
#TAKE PIC
pathFile = TakePic()
print 'picture in'
print pathFile
print 'License plate processing...'
pathin = "/home/pi/PicturesIn/" + pathFile
print pathin
pathFile = "/home/pi/Pictures/" + pathFile
print pathFile
predict = main(pathin,pathFile)
print predict
#POST to CSERVER
try:
pic_file = {'file': open(pathFile, 'rb')}
except:
pic_file = {'file': open(pathin, 'rb')}
r = requests.post(CSERVER_URL, data={'car_rfid': suid, 'predict': predict}, files = pic_file)
print 'done'
# Hook the SIGINT
signal.signal(signal.SIGINT, end_read)
# Create an object of the class MFRC522
MIFAREReader = MFRC522.MFRC522()
servo.closeBarrier()
# Welcome message
print "Welcome to the Smart Tollbooth system"
print "Press Ctrl-C to stop."
GPIO.output(Buzzer, GPIO.LOW)
rfid()
| 27.650888
| 105
| 0.585919
|
acf1e9532b7ac771f3df57c8b67c24d73d5c6aee
| 1,721
|
py
|
Python
|
pylib/rtstats_util.py
|
mustbei/rtstats
|
f1466c2af7b0ffa1c1433ede8276f915008d0170
|
[
"Apache-2.0"
] | 2
|
2017-02-09T18:59:50.000Z
|
2017-02-09T19:07:36.000Z
|
pylib/rtstats_util.py
|
mustbei/rtstats
|
f1466c2af7b0ffa1c1433ede8276f915008d0170
|
[
"Apache-2.0"
] | 8
|
2016-10-04T14:01:01.000Z
|
2017-04-20T12:39:40.000Z
|
pylib/rtstats_util.py
|
mustbei/rtstats
|
f1466c2af7b0ffa1c1433ede8276f915008d0170
|
[
"Apache-2.0"
] | 2
|
2019-04-01T19:01:52.000Z
|
2019-11-22T20:26:31.000Z
|
"""Collection of Utilities for rtstats"""
import json
import os
os.environ["MPLCONFIGDIR"] = "/tmp" # hack
import psycopg2
import matplotlib
from matplotlib.ticker import FuncFormatter
import matplotlib.dates as mdates
matplotlib.use("agg")
import matplotlib.pyplot as plt
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
def get_config():
"""Return a dict() of our runtime configuration"""
fn = "%s/settings.json" % (
os.path.join(os.path.dirname(__file__), "../config"),
)
    with open(fn) as fh:
        return json.load(fh)
def get_dbconn(rw=False):
"""return a database connection"""
config = get_config()
dbopts = config["databaserw" if rw is True else "databasero"]
return psycopg2.connect(
dbname=dbopts["name"],
host=dbopts["host"],
user=dbopts["user"],
password=dbopts["password"],
)
def fancy_labels(ax):
"""Make matplotlib date axis labels great again"""
xlim = ax.get_xlim()
days = xlim[1] - xlim[0]
daily = True
if days < 4:
daily = False
ax.xaxis.set_major_locator(mdates.HourLocator(range(0, 24, 4)))
elif days < 31:
ax.xaxis.set_major_locator(mdates.DayLocator([1, 8, 15, 22, 29]))
elif days < 63:
ax.xaxis.set_major_locator(mdates.DayLocator([1, 15]))
else:
ax.xaxis.set_major_locator(mdates.DayLocator([1]))
def my_formatter(x, pos=None):
x = mdates.num2date(x)
if daily:
fmt = "%-d %b"
elif pos == 0 or x.hour == 0:
fmt = "%-Hz\n%-d %b"
else:
fmt = "%-H"
return x.strftime(fmt)
ax.xaxis.set_major_formatter(FuncFormatter(my_formatter))
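# --- Illustrative usage sketch (added commentary, not part of the original module) ---
# fancy_labels() is applied to an axes object after the data have been plotted,
# so the locator/formatter can inspect the x-limits. A minimal example with
# synthetic data; the function and file names below are illustrative only.
def example_timeseries_plot(png_filename="/tmp/example.png"):
    """Plot a dummy hourly series and format the date axis with fancy_labels."""
    import datetime
    times = [
        datetime.datetime(2017, 1, 1) + datetime.timedelta(hours=h)
        for h in range(48)
    ]
    values = list(range(48))
    fig, ax = plt.subplots()
    ax.plot(times, values)
    fancy_labels(ax)
    fig.savefig(png_filename)
    plt.close(fig)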
| 25.308824
| 73
| 0.629866
|
753cbf082451a5ca73a223f3000b69c3d37abaf7
| 57,631
|
py
|
Python
|
Unicycle Simulation/scripts/drrrts_nmpc.py
|
TSummersLab/Risk_Bounded_Nonlinear_Robot_Motion_Planning
|
717b9f07f4ed625ee33ab8ec22ce78dc2907d759
|
[
"MIT"
] | 3
|
2022-01-07T19:37:03.000Z
|
2022-03-15T08:50:28.000Z
|
Unicycle Simulation/scripts/drrrts_nmpc.py
|
TSummersLab/Risk_Bounded_Nonlinear_Robot_Motion_Planning
|
717b9f07f4ed625ee33ab8ec22ce78dc2907d759
|
[
"MIT"
] | null | null | null |
Unicycle Simulation/scripts/drrrts_nmpc.py
|
TSummersLab/Risk_Bounded_Nonlinear_Robot_Motion_Planning
|
717b9f07f4ed625ee33ab8ec22ce78dc2907d759
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
Changelog:
New is v1_0:
- Create script
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Author:
Sleiman Safaoui
Email:
sleiman.safaoui@utdallas.edu
Github:
@The-SS
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This script runs the RRT*-generated path, extracted by `opt_path.py` with an nmpc low level controller
Tested platform:
- Python 3.6.9 on Ubuntu 18.04 LTS (64 bit)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
###############################################################################
###############################################################################
# Import all the required libraries
import math
from casadi import *
import numpy as np
import numpy.linalg as la
import numpy.random as npr
import matplotlib.pyplot as plt
import time
import pickle
# from opt_path import load_pickle_file
import os
from plotting import animate
import copy
from matplotlib.patches import Rectangle, Ellipse
from matplotlib.collections import EllipseCollection
from matplotlib.offsetbox import AnnotationBbox, AuxTransformBox
from collision_check import *
import sys
sys.path.insert(0, '../unicycle')
sys.path.insert(0, '../rrtstar')
sys.path.insert(0, '../')
# from unicycle import lqr, plotting, tracking_controller
import UKF_Estimator as UKF_Estimator
import config
STEER_TIME = config.STEER_TIME # Maximum Steering Time Horizon
DT = config.DT # timestep between controls
SAVEPATH = config.SAVEPATH
GOALAREA = config.GOALAREA #[xmin,xmax,ymin,ymax]
RANDAREA = copy.copy(config.RANDAREA) # [xmin,xmax,ymin,ymax]
VELMAX = config.VELMAX
VELMIN = config.VELMIN
ANGVELMAX = config.ANGVELMAX
ANGVELMIN = config.ANGVELMIN
ROBRAD = config.ROBRAD # radius of robot (added as padding to environment bounds and the obstacles)
OBSTACLELIST = copy.copy(config.OBSTACLELIST) # [ox,oy,wd,ht]
SIGMAW = config.SIGMAW
SIGMAV = config.SIGMAV
CROSSCOR = config.CROSSCOR
ALFA = copy.copy(config.ALFA)
QLL = config.QLL
RLL = config.RLL
QTLL = config.QTLL
# copy last alfa (env alfa) to beginning and remove the last 4 (env) alfas from the end
lastalfa = ALFA[-1]
obsalfa = ALFA[0:-4]
obsalfa.insert(0, lastalfa)
ALFA = obsalfa
def sim_state(T, x0, u, f):
f_value = f(x0, u)
st = x0+T * f_value.T
return st
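# --- Illustrative sketch (added commentary, not part of the original pipeline) ---
# sim_state() above performs a single forward-Euler step of the unicycle model
# x_dot = [v*cos(theta), v*sin(theta), omega] through the Casadi function f.
# The same step written with plain numpy, for reference (state = [x, y, theta],
# control = [v, omega]; the helper name is illustrative):
def euler_step_unicycle(state, control, dt):
    """One explicit-Euler step of the unicycle model using numpy only."""
    x, y, theta = state
    v, omega = control
    return np.array([x + dt * v * np.cos(theta),
                     y + dt * v * np.sin(theta),
                     theta + dt * omega])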
#
# def load_ref_traj(input_file):
# inputs_string = "inputs"
# states_file = input_file.replace(inputs_string, "states")
#
# input_file = os.path.join(SAVEPATH, input_file)
# states_file = os.path.join(SAVEPATH, states_file)
#
# # load inputs and states
# ref_inputs = load_pickle_file(input_file)
# ref_states = load_pickle_file(states_file)
# return ref_states, ref_inputs
############################# NMPC FUNCTIONS ##################################
def SetUpSteeringLawParametersNoColAvoid(N, T, v_max, v_min, omega_max, omega_min):
"""
Sets up an IPOPT NLP solver using Casadi Opti
Inputs:
N: horizon
T: time step (sec)
v_max, v_min: maximum and minimum linear velocities in m/s
omega_max, omega_min: maximum and minimum angular velocities in rad/s
Outputs:
        solver, f, n_states, n_controls, U, X, P, DELTA, OBSPAD, ENVPAD
        solver: Casadi NLP solver using ipopt
f: Casadi continuous time dynamics function
n_states, n_controls: number of states and controls
U, X: Casadi input and state variables (N x n_controls and (N+1)x n_states matrices)
P: Casadi desired state parameters ((N+1) x n_states matrix)
        DELTA, OBSPAD, ENVPAD: returned as empty placeholders (this formulation has no collision-avoidance variables)
"""
# Define state and input cost matrices
Q = QLL
R = RLL
QT = QTLL
opti = casadi.Opti()
# Define symbolic states using Casadi Opti
x = opti.variable()
y = opti.variable()
theta = opti.variable()
states = vertcat(x, y, theta) # all three states
n_states = states.size()[0] # number of symbolic states
# Define symbolic inputs using Cadadi SX
v = opti.variable()
omega = opti.variable()
controls = vertcat(v, omega) # both controls
n_controls = controls.size()[0] # number of symbolic inputs
# RHS of nonlinear unicycle dynamics (continuous time model)
rhs = horzcat(v * cos(theta), v * sin(theta), omega)
# Unicycle continuous time dynamics function
f = Function('f', [states, controls], [rhs], ['input_state', 'control_input'], ['rhs'])
# Casadi Opti trajectory variables/parameters for multiple shooting
U = opti.variable(N, n_controls)
X = opti.variable(N+1, n_states)
P = opti.parameter(N+1, n_states)
discrete = [False]*(N*n_controls + (N+1)*n_states) # specify U and X to be continuous variables
# Cost function
obj = 0 # objective/cost
opti.subject_to(X[0, :].T == P[0, :].T)
for i in range(N):
# add to the cost the quadratic stage cost: (x-x_des)*Q*(x-x_des)^T + u*R*u^T
obj += mtimes([U[i, :], R, U[i, :].T]) # quadratic penalty on control effort
obj += mtimes([X[i, :] - P[i, :], Q, X[i, :].T - P[i, :].T]) # quadratic penalty on deviation from reference state
# compute the next state from the dynamics
x_next_ = f(X[i, :], U[i, :]) * T + X[i, :]
# make the dynamics' next state the same as the i+1 trajectory state (multiple shooting) (satisfy dynamics)
opti.subject_to(X[i + 1, :].T == x_next_.T)
# we might not be able to get back to the original target goal state
# alternatively, we have a large penalty of being away from it
obj += mtimes([X[N, :] - P[N, :], QT, X[N, :].T - P[N, :].T])
# minimize this objective
opti.minimize(obj)
# state environment constraints
_, env_edges = get_padded_edges()
x_max_env = env_edges["right"]
x_min_env = env_edges["left"]
y_max_env = env_edges["top"]
y_min_env = env_edges["bottom"]
opti.subject_to(opti.bounded(x_min_env, X[:, 0], x_max_env))
opti.subject_to(opti.bounded(y_min_env, X[:, 1], y_max_env))
opti.subject_to(opti.bounded(-casadi.inf, X[:,2], casadi.inf))
# input constraints
opti.subject_to(opti.bounded(v_min, U[:,0], v_max))
opti.subject_to(opti.bounded(omega_min, U[:,1], omega_max))
# create a dict of the discrete flags
args = dict(discrete=discrete)
# specify the solver
# opti.solver("bonmin", args)
opti.solver("ipopt", args)
solver = opti # solver instance to return
DELTA = []
OBSPAD, ENVPAD = [], []
return solver, f, n_states, n_controls, U, X, P, DELTA, OBSPAD, ENVPAD
def SetUpSteeringLawParametersBigM(N, T, v_max, v_min, omega_max, omega_min):
"""
Sets up a BONMIN MINLP solver using Casadi Opti
Collision avoidance is encoded with Big-M formulation
Inputs:
N: horizon
T: time step (sec)
v_max, v_min: maximum and minimum linear velocities in m/s
omega_max, omega_min: maximum and minimum angular velocities in rad/s
Outputs:
solver, f, n_states, n_controls, U, X, P, delta
solver: Casadi NLP solver using bonmin
f: Casadi continuous time dynamics function
n_states, n_controls: number of states and controls
U, X: Casadi input and state variables (N x n_controls and (N+1)x n_states matrices)
P: Casadi desired state parameters ((N+1) x n_states matrix)
Delta: Casadi 0-1 variables for constraints (4*num_obs vector)
"""
# Define state and input cost matrices
Q = QLL
R = RLL
QT = QTLL
opti = casadi.Opti()
# Define symbolic states using Casadi Opti
x = opti.variable()
y = opti.variable()
theta = opti.variable()
states = vertcat(x, y, theta) # all three states
n_states = states.size()[0] # number of symbolic states
# Define symbolic inputs using Cadadi SX
v = opti.variable()
omega = opti.variable()
controls = vertcat(v, omega) # both controls
n_controls = controls.size()[0] # number of symbolic inputs
# RHS of nonlinear unicycle dynamics (continuous time model)
rhs = horzcat(v * cos(theta), v * sin(theta), omega)
# Unicycle continuous time dynamics function
f = Function('f', [states, controls], [rhs], ['input_state', 'control_input'], ['rhs'])
# Casadi Opti trajectory variables/parameters for multiple shooting
U = opti.variable(N, n_controls)
X = opti.variable(N+1, n_states)
P = opti.parameter(N+1, n_states)
discrete = [False]*(N*n_controls + (N+1)*n_states) # specify U and X to be continuous variables
# Cost function
obj = 0 # objective/cost
opti.subject_to(X[0, :].T == P[0, :].T)
for i in range(N):
# add to the cost the quadratic stage cost: (x-x_des)*Q*(x-x_des)^T + u*R*u^T
obj += mtimes([U[i, :], R, U[i, :].T]) # quadratic penalty on control effort
# compute the next state from the dynamics
x_next_ = f(X[i, :], U[i, :]) * T + X[i, :]
# make the dynamics' next state the same as the i+1 trajectory state (multiple shooting) (satisfy dynamics)
opti.subject_to(X[i + 1, :].T == x_next_.T)
# add quadratic penalty on deviation from next RRT reference state
if i == N-1: # final state
obj += mtimes([X[i + 1, :] - P[i + 1, :], QT, X[i + 1, :].T - P[i + 1, :].T])
else: # previous states
obj += mtimes([X[i + 1, :] - P[i + 1, :], Q, X[i + 1, :].T - P[i + 1, :].T])
# minimize this objective
opti.minimize(obj)
# state environment constraints
opti.subject_to(opti.bounded(-casadi.inf, X[:,2], casadi.inf)) # theta only now (x,y states added later)
# input constraints
opti.subject_to(opti.bounded(v_min, U[:,0], v_max))
opti.subject_to(opti.bounded(omega_min, U[:,1], omega_max))
# obstacle constraints using Big-M formulation TODO: TRY THE CONVEX-HULL REFORMULATION https://optimization.mccormick.northwestern.edu/index.php/Disjunctive_inequalities (it might be faster)
obs_edges, env_edges = get_padded_edges()
x_max_env = env_edges["right"]
x_min_env = env_edges["left"]
y_max_env = env_edges["top"]
y_min_env = env_edges["bottom"]
num_obs = len(obs_edges)
restrictive_deltas = True
if restrictive_deltas:
DELTA = opti.variable(4 * num_obs) # 0-1 variables to indicate if an obstacle is hit
discrete += [True] * (4 * num_obs)
else:
DELTA = opti.variable(N, 4 * num_obs) # 0-1 variables to indicate if an obstacle is hit
discrete += [True] * (4 * num_obs * N) # specify the delta variables to be discrete (with above bound --> 0-1 variables)
opti.subject_to(opti.bounded(0, DELTA, 1))
    M = max(x_max_env - x_min_env, y_max_env - y_min_env) * 2 + 10  # a large upper bound on x and y
# DR padding values
OBSPAD = opti.parameter(N+1, 4 * num_obs) # for each time step, each obstacle edge has its own dr padding (right, left, top, bottom)
ENVPAD = opti.parameter(N+1, 4) # for each time step, the four environment edges have their own dr padding (xmax, xmin, ymax, ymin) = (right, left, top, bottom)
opti.subject_to(opti.bounded(x_min_env + ENVPAD[:,1], X[:, 0], x_max_env - ENVPAD[:,0]))
opti.subject_to(opti.bounded(y_min_env + ENVPAD[:,3], X[:, 1], y_max_env - ENVPAD[:,2]))
for obs_num, obs in enumerate(obs_edges):
# for every obstacle
top = obs["top"]
bottom = obs["bottom"]
right = obs["right"]
left = obs["left"]
if restrictive_deltas:
opti.subject_to(opti.bounded(-M * (1 - DELTA[4 * obs_num + 0]) + right + OBSPAD[1:, 0],
X[1:, 0],
x_max_env - ENVPAD[1:, 0] + M * (
1 - DELTA[4 * obs_num + 0]))) # be to the right of the obstacle
opti.subject_to(opti.bounded(-M * (1 - DELTA[4 * obs_num + 1]) + x_min_env + ENVPAD[1:, 1],
X[1:, 0],
left - OBSPAD[1:, 1] + M * (
1 - DELTA[4 * obs_num + 1]))) # be to the left of the obstacle
opti.subject_to(opti.bounded(-M * (1 - DELTA[4 * obs_num + 2]) + top + OBSPAD[1:, 2],
X[1:, 1],
y_max_env - ENVPAD[1:, 2] + M * (
1 - DELTA[4 * obs_num + 2]))) # be to the top of the obstacle
opti.subject_to(opti.bounded(-M * (1 - DELTA[4 * obs_num + 3]) + y_min_env + ENVPAD[1:, 3],
X[1:, 1],
bottom - OBSPAD[1:, 3] + M * (
1 - DELTA[4 * obs_num + 3]))) # be to the bottom of the obstacle
else:
# add Big-M formulation disjunctive constraints
opti.subject_to(opti.bounded(-M * (1 - DELTA[:, 4 * obs_num + 0]) + right + OBSPAD[1:, 0],
X[1:, 0],
x_max_env - ENVPAD[1:, 0] + M * (1 - DELTA[:, 4 * obs_num + 0]))) # be to the right of the obstacle
opti.subject_to(opti.bounded(-M * (1 - DELTA[:, 4 * obs_num + 1]) + x_min_env + ENVPAD[1:, 1],
X[1:, 0],
left - OBSPAD[1:, 1] + M * (1 - DELTA[:, 4 * obs_num + 1]))) # be to the left of the obstacle
opti.subject_to(opti.bounded(-M * (1 - DELTA[:, 4 * obs_num + 2]) + top + OBSPAD[1:, 2],
X[1:, 1],
y_max_env - ENVPAD[1:, 2] + M * (1 - DELTA[:, 4 * obs_num + 2]))) # be to the top of the obstacle
opti.subject_to(opti.bounded(-M * (1 - DELTA[:, 4 * obs_num + 3]) + y_min_env + ENVPAD[1:, 3],
X[1:, 1],
bottom - OBSPAD[1:, 3] + M * (1 - DELTA[:, 4 * obs_num + 3]))) # be to the bottom of the obstacle
if restrictive_deltas:
opti.subject_to(
1 <= DELTA[4 * obs_num + 0] + DELTA[4 * obs_num + 1] + DELTA[4 * obs_num + 2] + DELTA[4 * obs_num + 3])
else:
# require at least one of these constraints to be true
opti.subject_to(
1 <= DELTA[:, 4 * obs_num + 0] + DELTA[:, 4 * obs_num + 1] + DELTA[:, 4 * obs_num + 2] + DELTA[:, 4 * obs_num + 3])
# create a dict of the discrete flags
args = dict(discrete=discrete)
# specify the solver
opti.solver("bonmin", args)
solver = opti # solver instance to return
return solver, f, n_states, n_controls, U, X, P, DELTA, OBSPAD, ENVPAD
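# --- Illustrative note (added commentary, not part of the original code) ---
# How the Big-M trick above works for one disjunct, e.g. "stay to the right of
# the obstacle": the constraint is
#     -M*(1 - delta) + right + pad  <=  x  <=  x_max - env_pad + M*(1 - delta)
# With delta = 1 it reduces to  right + pad <= x <= x_max - env_pad  (active);
# with delta = 0 the interval is widened by +/- M, so any feasible x satisfies
# it (vacuous). The constraint sum(delta) >= 1 then forces at least one
# side-constraint per obstacle to be active. A tiny helper showing the
# effective interval for given values (purely illustrative):
def bigm_interval(delta, lower, upper, big_m):
    """Effective [lo, hi] interval of a Big-M relaxed bound constraint."""
    return lower - big_m * (1 - delta), upper + big_m * (1 - delta)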
def nonlinsteerNoColAvoid(solver, x0, xT, n_states, n_controls, N, T, U, X, P, DELTA, OBSPAD, ENVPAD, current_ref_traj, current_ref_inputs, obs_pad, env_pad):
"""
    Solves the nonlinear steering problem using the solver from SetUpSteeringLawParametersNoColAvoid
Inputs:
solver: Casadi NLP solver from SetUpSteeringLawParameters
x0, xT: initial and final states as (n_states)x1 ndarrays e.g. [[2.], [4.], [3.14]]
n_states, n_controls: number of states and controls
N: horizon
T: time step
        U, X, P, DELTA, OBSPAD, ENVPAD: Casadi variables/parameters from the setup function (DELTA, OBSPAD, ENVPAD and obs_pad, env_pad are unused placeholders here)
current_ref_traj, current_ref_inputs: reference trajectory and reference inputs as Nx(n_states) ndarrays# TODO: add shapes
Outputs:
x_casadi, u_casadi: trajectory states and inputs returned by Casadi
if solution found:
states: (N+1)x(n_states) ndarray e.g. [[1 2 0], [1.2 2.4 0], [2 3.5 0]]
controls: (N)x(n_controls) ndarray e.g. [[0.5 0], [1 0.01], [1.2 -0.01]]
else, [],[] returned
"""
# Create an initial state trajectory that roughly accomplishes the desired state transfer (by interpolating)
init_states_param = np.linspace(0, 1, N + 1)
init_states = np.zeros([N + 1, n_states])
dx = xT - x0
for i in range(N + 1):
init_states[i] = (x0 + init_states_param[i] * dx).flatten()
# Create an initial input trajectory that roughly accomplishes the desired state transfer
# (using interpolated states to compute rough estimate of controls)
dist = la.norm(xT[0:2] - x0[0:2])
ang_dist = xT[2][0] - x0[2][0]
total_time = N * T
const_vel = dist / total_time
const_ang_vel = ang_dist / total_time
init_inputs = np.array([const_vel, const_ang_vel] * N).reshape(-1, 2)
## set parameter
constraint_states = []
constraint_states.append(x0.reshape(n_states))
for ref_state in current_ref_traj:
constraint_states.append(ref_state.reshape(n_states))
init_inputs = []
for ref_input in current_ref_inputs:
init_inputs.append(ref_input.reshape(n_controls))
init_inputs = np.array(init_inputs)
constraint_states = np.array(constraint_states)
solver.set_value(P, constraint_states)
solver.set_initial(X, constraint_states)
solver.set_initial(U, init_inputs)
# solver.set_initial(X, init_states)
# solver.set_initial(U, init_inputs)
try:
res = solver.solve()
except:
print('Steering NLP Failed')
return [], []
# Update the cost_total
# cost_total = res.value(self.obj) # self.opti.debug.value(self.obj)
# Obtain the optimal control input sequence
u_casadi = res.value(U) # shape: (N, n_controls)
# Get the predicted state trajectory for N time steps ahead
x_casadi = res.value(X) # shape: # (N+1, n_states)
return x_casadi, u_casadi
def nonlinsteerBigM(solver, x0, xT, n_states, n_controls, N, T, U, X, P, DELTA, OBSPAD, ENVPAD, current_ref_traj, current_ref_inputs, obs_pad, env_pad):
"""
Solves the nonlinear steering problem using the solver from SetUpSteeringLawParametersBigM
Inputs:
solver: Casadi NLP solver from SetUpSteeringLawParameters
x0, xT: initial and final states as (n_states)x1 ndarrays e.g. [[2.], [4.], [3.14]]
n_states, n_controls: number of states and controls
N: horizon
T: time step
        U, X, P, DELTA, OBSPAD, ENVPAD: Casadi variables/parameters from the setup function
current_ref_traj, current_ref_inputs: reference trajectory and reference inputs as Nx(n_states) ndarrays# TODO: add shapes
Outputs:
x_casadi, u_casadi: trajectory states and inputs returned by Casadi
if solution found:
states: (N+1)x(n_states) ndarray e.g. [[1 2 0], [1.2 2.4 0], [2 3.5 0]]
controls: (N)x(n_controls) ndarray e.g. [[0.5 0], [1 0.01], [1.2 -0.01]]
else, [],[] returned
"""
# Create an initial state trajectory that roughly accomplishes the desired state transfer (by interpolating)
init_states_param = np.linspace(0, 1, N + 1)
init_states = np.zeros([N + 1, n_states])
dx = xT - x0
for i in range(N + 1):
init_states[i] = (x0 + init_states_param[i] * dx).flatten()
# Create an initial input trajectory that roughly accomplishes the desired state transfer
# (using interpolated states to compute rough estimate of controls)
dist = la.norm(xT[0:2] - x0[0:2])
ang_dist = xT[2][0] - x0[2][0]
total_time = N * T
const_vel = dist / total_time
const_ang_vel = ang_dist / total_time
init_inputs = np.array([const_vel, const_ang_vel] * N).reshape(-1, 2)
## set parameter
constraint_states = []
constraint_states.append(x0.reshape(n_states))
for ref_state in current_ref_traj:
constraint_states.append(ref_state.reshape(n_states))
constraint_states = np.array(constraint_states)
# init_inputs = []
# for ref_input in current_ref_inputs:
# init_inputs.append(ref_input.reshape(n_controls))
# init_inputs = np.array(init_inputs)
solver.set_value(P, constraint_states)
solver.set_value(OBSPAD, obs_pad)
solver.set_value(ENVPAD, env_pad)
# solver.set_initial(X, constraint_states)
solver.set_initial(X, init_states)
solver.set_initial(U, init_inputs)
# res = solver.solve()
try:
res = solver.solve()
except:
print('Steering NLP Failed')
return [], []
# Update the cost_total
# cost_total = res.value(self.obj) # self.opti.debug.value(self.obj)
# Obtain the optimal control input sequence
u_casadi = res.value(U) # shape: (N, n_controls)
# Get the predicted state trajectory for N time steps ahead
x_casadi = res.value(X) # shape: # (N+1, n_states)
print('delta', res.value(DELTA))
return x_casadi, u_casadi
def nmpc(N,T, rrt_states, rrt_inputs, num_steps, num_states, num_inputs,
obstaclelist, envbounds, drnmpc=False, hnmpc=False):
w = np.zeros([num_steps, num_states])
v = np.zeros([num_steps, num_states])
return disturbed_nmpc(N, T, rrt_states, rrt_inputs, num_steps,
num_states, num_inputs, w, v, obstaclelist, envbounds, drnmpc, hnmpc=hnmpc)
def disturbed_nmpc(N,T, rrt_states, rrt_inputs, num_steps, num_states, num_inputs, w, v, obstaclelist, envbounds, drnmpc=True, hnmpc=True):
# if drnmpc --> use col avoidance pipeline, if not drnmpc --> just do no col avoid
# if hnmpc True --> drnmpc but with nonlinsteerNoColAvoid when fail, False --> only drnmpc
no_dr_nmpc = not drnmpc
v_max = VELMAX
v_min = VELMIN
omega_max = ANGVELMAX
omega_min = ANGVELMIN
# TODO: remove x_min, x_max, y_min, y_max from inputs
obs_edges, _ = get_padded_edges()
# Set up the Casadi solver
if drnmpc and not hnmpc:
[solver, f, _, _, U, X, P, DELTA, OBSPAD, ENVPAD] = SetUpSteeringLawParametersBigM(N, T, v_max, v_min, omega_max, omega_min)
elif hnmpc:
[solver, f, _, _, U, X, P, DELTA, OBSPAD, ENVPAD] = SetUpSteeringLawParametersBigM(N, T, v_max, v_min, omega_max, omega_min)
[solverN, _, _, _, UN, XN, PN, DELTAN, OBSPADN, ENVPADN] = SetUpSteeringLawParametersNoColAvoid(N, T, v_max, v_min, omega_max, omega_min)
elif no_dr_nmpc:
[solverN, f, _, _, UN, XN, PN, DELTAN, OBSPADN, ENVPADN] = SetUpSteeringLawParametersNoColAvoid(N, T, v_max, v_min, omega_max, omega_min)
final_input = [0.0, 0.0] # final input
final_state = sim_state(T, rrt_states[-1].reshape(3), rrt_inputs[-1], f).full().reshape(3) # final state
# pad rest of inputs and states with last state and last input for the rest of the horizon (N-1 times)
rrt_inputs = rrt_inputs.tolist() # num_steps x num_controls (e.g. 200x2)
rrt_states = rrt_states.tolist() # num_steps x num_states (e.g. 200x3)
rrt_states.append(final_state) # first append the last state; now we have (num_steps+1) x num_states (e.g. 201x3)
for _ in range(N-1):
rrt_inputs.append(final_input)
rrt_states.append(final_state)
rrt_inputs = np.array(rrt_inputs) # (num_steps+N-1) x num_controls (e.g. for N = 10: 209x2)
rrt_states = np.array(rrt_states) # (num_steps+1+N-1) x num_states (e.g. for N = 10: 210x3)
######################
# Start NMPC Tracker #
######################
# Example: N = 2
# repeat final input (= 0) N-1 times
# |
# (x0,u0)--->(x1,u1)--->(x2,u2)--->(x3,u3)--->(x4,u4)--->(x5,u5)--->(xT,u_f)--->(xT)
# | | | | | |
# current | | | | repeat final states
# state |_______|__| | N-1 times
# | | |
# N horizon |___________|
# next ref |
# inputs N horizon next ref states
# flags for function to terminate
pt_obs_collision_detected = False
line_obs_collision_detected = False
nlp_failed_flag = False
visited_states = []
applied_controls = []
current_state = rrt_states[0].reshape(num_states, 1) # x0
# check if current state is safe
collision_detected = PtObsColFlag(current_state, obstaclelist, envbounds, ROBRAD)
if collision_detected:
pt_obs_collision_detected = True
return pt_obs_collision_detected, line_obs_collision_detected, nlp_failed_flag, [], [], []
visited_states.append(current_state) # mark current state as visited states
# set the same threshold for the environment and the obstacles; e.g. alfa = [env_alfa, obs1_alfa, ..., obs5_alfa]
alfa = ALFA
SigmaW = SIGMAW
SigmaV = SIGMAV
CrossCor = CROSSCOR
# Note: num_steps = number of control steps available
# The last control will take the system to the final state
all_nmpc_planned_states = []
for itr in range(num_steps):
current_state = visited_states[-1] # Last visited state
horizon_ref_states = rrt_states[itr+1:itr+N+1] # next N rrt-planned states (N x num_states starting after current state)
horizon_ref_inputs = rrt_inputs[itr:itr+N] # next N rrt-planned inputs (N x num_controls starting at current state)
current_goal_state = horizon_ref_states[-1].reshape(num_states, 1) # end of current reference horizon states
# find covariance for all but the first state in the horizon
# first state/current state is deterministic
# covar of second state/next state is just SigmaW
# (X[t+1] = f(X[t], U[t]) + W[t]; f(.,.) is deterministic since X[t] is realized)
if drnmpc or hnmpc:
horizon_covars = ukfCovars(list(horizon_ref_states), list(horizon_ref_inputs[1:]), N-1, num_states, num_states, SigmaW, SigmaV, CrossCor, SigmaW)
env_pad, obs_pad = find_dr_padding(alfa, N, obs_edges, horizon_covars)
# index of node in horizon that with which collision avoidance should start
# (use at least 1 to avoid crashes due to state realizations in collision zone)
# obs_pad = 0*np.ones([N+1, 4 * num_obs])
# env_pad = np.zeros([N+1, 4])
# steer by solving NLP
if drnmpc or hnmpc: # if either drnmpc of hnmpc
x_casadi, u_casadi = nonlinsteerBigM(solver, current_state, current_goal_state, num_states, num_inputs, N, T, U,
X, P, DELTA, OBSPAD, ENVPAD, horizon_ref_states,
horizon_ref_inputs, obs_pad, env_pad)
if hnmpc and x_casadi == []:
obs_pad_reduced = copy.deepcopy(obs_pad)
env_pad_reduced = copy.deepcopy(env_pad)
max_tries = min(2,N) # maximum number of times to try with col avoid steering
for remove_bloating in range(max_tries):
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@ Removing ",remove_bloating,"-th bloating ad trying again @@@@@@@@@@@@@@@@")
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
# remove padding requirements from an additional step
obs_pad_reduced[remove_bloating + 1] *= 0
env_pad_reduced[remove_bloating + 1] *= 0
# try again to solve the problem
x_casadi, u_casadi = nonlinsteerBigM(solver, current_state, current_goal_state, num_states, num_inputs,
N, T, U,
X, P, DELTA, OBSPAD, ENVPAD, horizon_ref_states,
horizon_ref_inputs, obs_pad_reduced, env_pad_reduced)
if not x_casadi == []: # problem solved and x_casadi is not empty
break
if no_dr_nmpc or (hnmpc and x_casadi == []):
obs_pad = []
env_pad = []
x_casadi, u_casadi = nonlinsteerNoColAvoid(solverN, current_state, current_goal_state, num_states,
num_inputs, N,
T, UN,
XN, PN, DELTAN, OBSPADN, ENVPADN, horizon_ref_states,
horizon_ref_inputs, obs_pad, env_pad)
# print("###################################################")
# print("###################################################")
# print("###################################################")
# print("###################################################")
# print("############### Switched solvers ##################")
# print("###################################################")
# print("###################################################")
# print("###################################################")
# print("###################################################")
if x_casadi == []:
nlp_failed_flag = True
print("nmpc failed at itr: ", itr)
break
all_nmpc_planned_states.append(x_casadi)
# NLP succeeded and trajectory found
nmpc_input = u_casadi[0] # input to apply at current state
nmpc_next_state = x_casadi[1] # next state after nmpc_input is applied
# realized next state with noise
realized_next_state = nmpc_next_state.reshape(num_states, 1) + w[itr].reshape(num_states, 1) + v[itr].reshape(num_states, 1)
# update the visited states and applied controls
visited_states.append(realized_next_state)
applied_controls.append(nmpc_input.reshape(num_inputs, 1))
# check if realized state is safe
collision_detected = PtObsColFlag(realized_next_state, obstaclelist, envbounds, ROBRAD)
if collision_detected:
pt_obs_collision_detected = True
break
# check if line connecting previous state and realized state is safe
collision_detected = LineObsColFlag(current_state, realized_next_state, obstaclelist, ROBRAD)
if collision_detected:
line_obs_collision_detected = True
break
realized_states = visited_states
print('Done with nmpc')
visited_states = np.array(visited_states).reshape(len(visited_states), num_states)
applied_controls = np.array(applied_controls).reshape(len(applied_controls), num_inputs)
distance_error = la.norm(final_state[0:2] - visited_states[-1][0:2])
print('Final error away from RRT* goal:', distance_error)
result_data = {'pt_obs_collision_detected': pt_obs_collision_detected,
'line_obs_collision_detected': line_obs_collision_detected,
'nlp_failed_flag': nlp_failed_flag,
'visited_states': visited_states,
'applied_controls': applied_controls,
'all_nmpc_planned_states': all_nmpc_planned_states}
return result_data
def get_padded_edges():
'''
Finds the left, right, top, and bottom padded (by robot radius) edges for the obstacles and the environment
Outputs:
obs_edges = edges of obstacles in the form of a list where each element is a dictionary with "top","bottom", "right", and "left"
env_edges = edges of environment in the form of a dictionary with "top","bottom", "right", and "left"
obs_edges should be used as (x < "left") or (x > "right") or (y < "bottom") or (y > "top")
env_edges should be used as (x > "left") and (x < "right") and (y > "bottom") and (y < "top")
'''
randArea1 = copy.copy(RANDAREA) # [xmin,xmax,ymin,ymax]
obstacleList1 = copy.copy(OBSTACLELIST) # [ox,oy,wd,ht]
# environment bounds
xmin = randArea1[0]
xmax = randArea1[1]
ymin = randArea1[2]
ymax = randArea1[3]
# thickness of env edges (doesn't matter much, anything > 0 works)
thickness = 0.1
# original environment area - width and height
width = xmax - xmin
height = ymax - ymin
env_edges = {"left": xmin+ROBRAD, "right": xmax-ROBRAD, "bottom": ymin+ROBRAD, "top": ymax-ROBRAD} # environment edges
obs_edges = []
# add enough padding for obstacles for robot radius
for obs in obstacleList1:
xmin = obs[0] - ROBRAD
xmax = xmin + obs[2] + (2 * ROBRAD)
ymin = obs[1] - ROBRAD
ymax = ymin + obs[3] + (2 * ROBRAD)
edges = {"left": xmin, "right": xmax, "bottom": ymin, "top": ymax}
obs_edges.append(edges)
return obs_edges, env_edges
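# --- Illustrative sketch (added commentary, not part of the original code) ---
# As the docstring above notes, each obs_edges entry is meant to be used as
# (x < "left") or (x > "right") or (y < "bottom") or (y > "top"). A small
# point-versus-padded-obstacle check written that way (illustrative only):
def point_clear_of_obstacle(x, y, edges):
    """True if the point lies outside one padded obstacle from get_padded_edges()."""
    return (x < edges["left"] or x > edges["right"]
            or y < edges["bottom"] or y > edges["top"])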
def get_state_bounds(obs_edges, env_edges, state):
'''
Finds the position bounds on a state given a set of obstacles (find maximum padding along each direction before
colliding with an obstacle)
'''
eps = 0.00001 # arbitrarily small value
# current state
x = state[0]
y = state[1]
# environment bounds
x_max_env = env_edges["right"]
x_min_env = env_edges["left"]
y_max_env = env_edges["top"]
y_min_env = env_edges["bottom"]
# lists to add upper and lower bounds for x and y
# (min/max element selected from them later as the upper/lower bound)
# Initialize them with environment bounds
x_max_bounds = [x_max_env]
x_min_bounds = [x_min_env]
y_max_bounds = [y_max_env]
y_min_bounds = [y_min_env]
inside_obs_counter = 0 # check if state is inside multiple obstacles
for obs_num, obs in enumerate(obs_edges):
# obstacles
top = obs["top"]
bottom = obs["bottom"]
right = obs["right"]
left = obs["left"]
# TODO: This mess needs to be fixed. All obstacles need to be considered at once when the state could be inside
# an obstacle or when the state can be moved which might put it in an obstacle
# if the state is inside the obstacle, we have to move the state outside (to the closest bound)
if (left <= x <= right) and (bottom <= y <= top):
inside_obs_counter += 1
dr = abs(x - right) # x distance to right edge
dl = abs(x - left) # x distance to left edge
dt = abs(y - top) # y distance to top edge
db = abs(y - bottom) # y distance to bottom edge
d_list = [dr, dl, dt, db] # list of distances: right, left, top, bottom
idx = d_list.index(min(d_list)) # index of closest distance: 0-->right, 1-->left, 2-->top, 3-->bottom
if idx == 0:
x_min_bounds.append(right) # right edge is closest --> make right edge a lower bound for x
x = right + eps # move x to right edge
elif idx == 1:
x_max_bounds.append(left) # left edge is closest --> make left edge an upper bound for x
x = left - eps # move x to left edge
elif idx == 2:
y_min_bounds.append(top) # top edge is closest --> make top edge a lower bound for y
y = top + eps # move y to top edge
elif idx == 3:
y_max_bounds.append(bottom) # bottom edge is closest --> make bottom edge an upper bound for y
y = bottom - eps # move y to bottom edge
else:
print('ERROR: something is wrong')
# if drl < dtb: # state closer to right or left edge
# if dr < dl: # if x is closer to the right edge, add right edge as x lower bound
# x_min_bounds.append(right)
# else: # if x is closer to the left edge, add left edge as x upper bound
# x_max_bounds.append(left)
# else: # state closer to top or bottom edge
# if dt < db: # if y is closer to the top edge, add top edge as y lower bound
# y_min_bounds.append(top)
# else: # if y is closer to the bottom edge, add bottom edge as y upper bound
# y_max_bounds.append(bottom)
else: # state not inside an obstacle
# add left edge of obstacle to x upper bounds if current state is to the left of the obstacle
if (bottom <= y <= top) and (x <= left):
x_max_bounds.append(left)
# add right edge of obstacle to x lower bounds if current state is to the right of the obstacle
if (bottom <= y <= top) and (x >= right):
x_min_bounds.append(right)
# add bottom edge of obstacle to y upper bounds if current state is to the bottom of the obstacle
if (left <= x <= right) and (y <= bottom):
y_max_bounds.append(bottom)
# add top edge of obstacle to y lower bounds if current state is to the top of the obstacle
if (left <= x <= right) and (y >= top):
y_min_bounds.append(top)
# find maximum lower bound and minimum upper bound
xmax = min(x_max_bounds)
xmin = max(x_min_bounds)
ymax = min(y_max_bounds)
ymin = max(y_min_bounds)
for obs_num, obs in enumerate(obs_edges):
# obstacles
top = obs["top"]
bottom = obs["bottom"]
right = obs["right"]
left = obs["left"]
# TODO: This mess needs to be fixed. All obstacles need to be considered at once when the state could be inside
# an obstacle or when the state can be moved which might put it in an obstacle
# if the state is inside the obstacle, we have to move the state outside (to the closest bound)
if (left <= x <= right) and (bottom <= y <= top):
inside_obs_counter += 1
if inside_obs_counter > 1:
print('......................................................')
print('ERROR: INSIDE MULTIPLE OBSTACLES. THIS IS NOT RESOLVED')
print('******************************************************')
return []
return [xmin, xmax, ymin, ymax]
################## UKF #######################
def ukfCovars(xHist, uHist, N, numStates, numOutputs, SigmaW, SigmaV, CrossCor, start_node_covar):
'''
compute covariances at each state
xHist: list of states (list: N+1 elements each with num_states elements)
uHist: list of control inputs (list: N elements each with num_controls elements)
N: horizon length
numStates, numOutputs: number of states and outputs
SigmaW, SigmaV, CrossCor = process noise covariance, measurement noise covariance, and cross covariance between them
start_node_covar: covariance at the initial node
'''
ukf_params = {}
ukf_params["n_x"] = numStates
ukf_params["n_o"] = numOutputs
ukf_params["SigmaW"] = SigmaW
ukf_params["SigmaV"] = SigmaV
ukf_params["CrossCor"] = CrossCor
ukf_params["dT"] = DT
# Find covariances
SigmaE = start_node_covar # covariance at initial/from node
covarHist = [SigmaE]
for k in range(0, N):
x_hat = xHist[k]
u_k = uHist[k]
y_k = xHist[k+1] # (we assume perfect full state feedback so y = x)
ukf_params["x_hat"] = x_hat
ukf_params["u_k"] = u_k
ukf_params["SigmaE"] = SigmaE
ukf_params["y_k"] = y_k
ukf_estimator = UKF_Estimator.UKF() # initialize the state estimator
estimator_output = ukf_estimator.Estimate(ukf_params) # get the estimates
SigmaE = estimator_output["SigmaE"] # Unbox the covariance
covarHist.append(SigmaE.reshape(numStates, numStates))
return covarHist
################ DR Padding ##################
def find_dr_padding(alfa, N, obs_edges, horizon_covars):
'''
Finds DR padding value for each environment and obstacle edge
'''
xDir = np.array([1, 0, 0]) # x direction
    yDir = np.array([0, 1, 0])  # y direction
num_obs = len(obs_edges)
env_pad = np.zeros([N + 1, 4]) # for each time step, the four environment edges have their own dr padding (right, left, top, bottom)
obs_pad = np.zeros([N + 1, 4 * num_obs]) # for each time step, each obstacle edge has its own dr padding (right, left, top, bottom)
# find tightening value for all alfa values delta = sqrt((1-alfa)/alfa)
alpha = np.array(alfa, float)
delta = (1-alpha) / alpha
delta = delta**(0.5)
print("##############################")
print(delta)
for n in range(1,N+1): # skip the first time step (no DR padding there - it is already realized)
sigma = horizon_covars[n-1] # this step's covariance
# environment dr padding
rl_pad = delta[0] * math.sqrt(xDir.T @ sigma @ xDir) # padding along right/left direction
tb_pad = delta[0] * math.sqrt(yDir.T @ sigma @ yDir) # padding along top/bottom direction
env_pad[n, 0] = rl_pad # right
env_pad[n, 1] = rl_pad # left
env_pad[n, 2] = tb_pad # top
env_pad[n, 3] = tb_pad # bottom
# obstacle padding
for ob in range(num_obs): # for every obstacle, do the above
rl_pad = delta[ob+1] * math.sqrt(xDir.T @ sigma @ xDir) # padding along right/left direction
tb_pad = delta[ob+1] * math.sqrt(yDir.T @ sigma @ yDir) # padding along top/bottom direction
obs_pad[n, 4 * ob + 0] = rl_pad # right
obs_pad[n, 4 * ob + 1] = rl_pad # left
obs_pad[n, 4 * ob + 2] = tb_pad # top
obs_pad[n, 4 * ob + 3] = tb_pad # bottom
return env_pad, obs_pad
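# --- Illustrative sketch (added commentary, not part of the original code) ---
# The padding above is delta * sqrt(a' * Sigma * a), with
# delta = sqrt((1 - alfa) / alfa), evaluated separately along the x and y
# directions. A standalone version of that computation for a single risk level
# and covariance (the helper name and example numbers are illustrative):
def dr_padding_along(direction, sigma, alfa_value):
    """Distributionally robust padding along a unit direction vector."""
    delta = math.sqrt((1.0 - alfa_value) / alfa_value)
    direction = np.array(direction, float)
    return delta * math.sqrt(direction.T @ sigma @ direction)
# Example: alfa = 0.05 gives delta = sqrt(0.95 / 0.05) ~= 4.36, so a position
# variance of 0.01 along x yields a padding of about 4.36 * 0.1 ~= 0.44.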
###############################################################################
####################### FUNCTION CALLED BY MAIN() #############################
###############################################################################
#TODO:change this to support different min and max values
def drrrtstar_with_nmpc(nmpc_horizon, x_ref_hist, u_ref_hist, n, m, num_steps, w=[], v=[],
save_plot = False, save_file_name = "", drnmpc = True, hnmpc = True):
"""
runs an nmpc low level controller for the rrt* path
Inputs:
input_file: file name (only) for the optimal inputs
file_path: path to input_file
v_max, omega_max, x_max, y_max, theta_max: maximum linear and angular velocity and maximum x,y,theta values
w: generated disturbance process noise
v: generated disturbance sensor noise
animate_results: True --> animate, False --> don't animate
save_plot: True --> save plot, False --> don't save plot
ax_lim: axis limits for animation
robot_w: robot width for animation
robot_h: robot height for animation
wheel_w: robot wheel width for animation
wheel_h: robot wheel height for animation
"""
plotting_on = False
obstaclelist = copy.copy(OBSTACLELIST)
envbounds = copy.copy(RANDAREA)
robrad = ROBRAD
time_start = time.time()
# load inputs and states
rrt_states = x_ref_hist
rrt_inputs = u_ref_hist
if w == []: # no disturbance
# nmpc with no disturbance
results_data = nmpc(nmpc_horizon, DT, rrt_states, rrt_inputs, num_steps, n, m, obstaclelist, envbounds, drnmpc, hnmpc=hnmpc)
pt_obs_collision_detected = results_data["pt_obs_collision_detected"]
line_obs_collision_detected = results_data["line_obs_collision_detected"]
nlp_failed_flag = results_data["nlp_failed_flag"]
all_states_cl = results_data["visited_states"]
all_inputs_cl = results_data["applied_controls"]
all_nmpc_planned_states = results_data["all_nmpc_planned_states"]
else:
# run nmpc with disturbance
results_data = disturbed_nmpc(nmpc_horizon, DT, rrt_states, rrt_inputs, num_steps, n, m, w, v, obstaclelist, envbounds, drnmpc, hnmpc=hnmpc)
pt_obs_collision_detected = results_data["pt_obs_collision_detected"]
line_obs_collision_detected = results_data["line_obs_collision_detected"]
nlp_failed_flag = results_data["nlp_failed_flag"]
all_states_cl = results_data["visited_states"]
all_inputs_cl = results_data["applied_controls"]
all_nmpc_planned_states = results_data["all_nmpc_planned_states"]
time_stop = time.time()
run_time = time_stop - time_start
print('Total time: ', run_time)
print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
if pt_obs_collision_detected:
print('Collision between realized point and an obstacle/environment')
if line_obs_collision_detected:
print('Collision between line connecting realized point and previous point with an obstacle')
if nlp_failed_flag:
print('NLP failed for some reason')
crash_idx = -1 # index when NMPC failed completely (-1 --> didn't fail)
if pt_obs_collision_detected or line_obs_collision_detected or nlp_failed_flag:
crash_idx = len(all_states_cl)
# get last visited
last_state = all_states_cl[-1]
# pad states with last one and ctrl with nothing until num_steps
zero_ctrl = all_inputs_cl[-1] * 0
all_states_cl = list(all_states_cl)
all_inputs_cl = list(all_inputs_cl)
for padding_steps in range(crash_idx, num_steps+1):
all_states_cl.append(last_state)
all_inputs_cl.append(zero_ctrl)
all_states_cl = np.array(all_states_cl).reshape(num_steps+1, n)
all_inputs_cl = np.array(all_inputs_cl).reshape(num_steps, m)
# compute final state of rrt plan # TODO: this is overkill, fix later
opti = casadi.Opti()
x,y,theta = opti.variable(), opti.variable(), opti.variable()
states = vertcat(x, y, theta) # all three states
v, omega = opti.variable(), opti.variable()
controls = vertcat(v, omega) # both controls
rhs = horzcat(v * cos(theta), v * sin(theta), omega)
f = Function('f', [states, controls], [rhs], ['input_state', 'control_input'], ['rhs'])
xtm1 = rrt_states[-1, :]
xtm1 = xtm1.reshape(1,3)
utm1 = rrt_inputs[-1, :]
utm1 = utm1.reshape(1,2)
last_rrt_state = f(xtm1, utm1) * DT + xtm1
# RRT* x,y states
# extract the x and y states in the rrt plan with computed last state appended
x_orig = np.array(rrt_states).reshape(num_steps, n)[:, 0]
x_orig = list(x_orig)
x_orig.append(last_rrt_state[0])
x_orig = np.array(x_orig)
y_orig = np.array(rrt_states).reshape(num_steps, n)[:, 1]
y_orig = list(y_orig)
y_orig.append(last_rrt_state[1])
y_orig = np.array(y_orig)
# NMPC Realized x,y states
# get the x,y states of nmpc
x_cl = np.array(all_states_cl)[:, 0]
y_cl = np.array(all_states_cl)[:, 1]
##################################################################
# PLOTTING
if plotting_on:
# environment rectangle bottom left and top right corners
xmin_randarea = RANDAREA[0]
xmax_randarea = RANDAREA[1]
ymin_randarea = RANDAREA[2]
ymax_randarea = RANDAREA[3]
# thickness of env edges (doesn't matter much, anything > 0 works)
thickness = 0.1
# original environment area - width and height
width_randarea = xmax_randarea - xmin_randarea
height_randarea = ymax_randarea - ymin_randarea
# top, bottom, right, and left rectangles for the env edges
env_bottom = [xmin_randarea - thickness, ymin_randarea - thickness, width_randarea + 2 * thickness, thickness]
env_top = [xmin_randarea - thickness, ymax_randarea, width_randarea + 2 * thickness, thickness]
env_right = [xmax_randarea, ymin_randarea - thickness, thickness, height_randarea + 2 * thickness]
env_left = [xmin_randarea - thickness, ymin_randarea - thickness, thickness, height_randarea + 2 * thickness]
# add env as obstacle
OBSTACLELIST.append(env_bottom)
OBSTACLELIST.append(env_top)
OBSTACLELIST.append(env_right)
OBSTACLELIST.append(env_left)
# Create figure
fig = plt.figure(figsize=[9, 9])
ax = fig.add_subplot(1, 1, 1) # create an axes object in the figure
# ax.axis('equal')
plt.axis([-5.2, 5.2, -5.3, 5.3])
# Plot the environment boundary
xy, w, h = (-5.0, -5.0), 10.0, 10.0
r = Rectangle(xy, w, h, fc='none', ec='gold', lw=1)
offsetbox = AuxTransformBox(ax.transData)
offsetbox.add_artist(r)
        ab = AnnotationBbox(offsetbox, (xy[0] + w / 2., xy[1] + h / 2.),
boxcoords="data", pad=0.52, fontsize=20,
bboxprops=dict(facecolor="none", edgecolor='k', lw=20))
ax.add_artist(ab)
# Change ticklabel font size
plt.xticks(fontsize=32)
plt.yticks(fontsize=32)
# rough estimate of DR padding
xDir = np.array([1, 0, 0]) # x direction
        yDir = np.array([0, 1, 0])  # y direction
alpha = ALFA[0]
delta = (1-alpha)/alpha
delta = delta ** 0.5
xdrpad = delta * math.sqrt(xDir.T @ SIGMAW @ xDir)
ydrpad = delta * math.sqrt(yDir.T @ SIGMAW @ yDir)
# Plot the rectangle obstacles with DR padding
obstacles = [Rectangle(xy=[ox - ROBRAD - xdrpad, oy - ROBRAD - ydrpad],
width=wd + 2 * ROBRAD + 2*xdrpad,
height=ht + 2 * ROBRAD + 2*ydrpad,
angle=0,
color="palegoldenrod") for (ox, oy, wd, ht) in OBSTACLELIST]
for obstacle in obstacles:
ax.add_artist(obstacle)
# Plot the rectangle obstacles with robot radius padding
obstacles = [Rectangle(xy=[ox - ROBRAD, oy - ROBRAD],
width=wd +2 * ROBRAD,
height=ht +2 * ROBRAD,
angle=0,
color="mistyrose") for (ox, oy, wd, ht) in OBSTACLELIST]
for obstacle in obstacles:
ax.add_artist(obstacle)
# Plot the true rectangle obstacles
obstacles = [Rectangle(xy=[ox, oy], # add radius padding
width=wd, # add radius padding
height=ht, # add radius padding
angle=0,
color="k") for (ox, oy, wd, ht) in OBSTACLELIST]
for obstacle in obstacles:
ax.add_artist(obstacle)
# plot RRT* sampled points
plt.plot(x_orig, y_orig, 'o', color='gray')
# plot NMPC realized points
plt.plot(x_cl, y_cl, 'x', color='red')
colorlist = ["blue", "green", "orangered", "purple", "lime", "coral"]
num_colors = len(colorlist)
for idx, nmpc_plan in enumerate(all_nmpc_planned_states):
nmpc_plan_x = nmpc_plan[:,0]
nmpc_plan_y = nmpc_plan[:,1]
plt.plot(nmpc_plan_x, nmpc_plan_y, color=colorlist[idx%num_colors])
if save_plot:
plot_name = save_file_name + '_plot_nmpc.png'
plt.savefig(plot_name)
plt.show()
# result_data = {'all_states_cl': all_states_cl,
# 'all_inputs_cl': all_inputs_cl,
# 'pt_obs_collision_detected': pt_obs_collision_detected,
# 'line_obs_collision_detected': line_obs_collision_detected,
# 'nlp_failed_flag': nlp_failed_flag,
# 'crash_idx': crash_idx,
# 'last_rrt_state': last_rrt_state}
collision_flag = pt_obs_collision_detected or line_obs_collision_detected or nlp_failed_flag
result_data = {'x_hist': all_states_cl[0:-1,:],
'u_hist': all_inputs_cl,
'collision_flag': collision_flag,
'collision_idx': crash_idx,
'run_time': run_time,
'nlp_failed_flag': nlp_failed_flag}
return result_data
# TODO: main is no longer up to date MUST UPDATE
# if __name__ == '__main__':
# # load file
# input_file = "OptTraj_short_v1_0_1607441105_inputs"
# x_ref_hist, u_ref_hist = load_ref_traj(input_file)
# rrt_states = x_ref_hist
# rrt_inputs = u_ref_hist
#
# v_max = VELMAX # maximum linear velocity (m/s)
# omega_max = ANGVELMAX # 0.125 * (2 * np.pi) # maximum angular velocity (rad/s)
# x_max = 5 # maximum state in the horizontal direction
# y_max = 5 # maximum state in the vertical direction
# theta_max = np.inf # maximum state in the theta direction
# ax_lim = [-6, 6, -6, 6]
# robot_w = 0.2 / 2
# robot_h = 0.5 / 2
# wheel_w = 0.5 / 2
# wheel_h = 0.005 / 2
# nmpc_horizon = STEER_TIME
#
# # Number of states, inputs
# _, n = rrt_states.shape
# num_steps, m = rrt_inputs.shape
#
# # generate disturbance
# # Time start, end
# t0, tf = 0, (num_steps) * DT
# # Sampling period
# Ts = DT
# # Time history
# t_hist = np.arange(t0, tf, Ts)
# # Number of time steps
# T = t_hist.size
# # Initial state and disturbance
# x0 = np.array(rrt_states[0, :])
# # Generate base disturbance sequence
# w_base_hist = tracking_controller.generate_disturbance_hist(T, Ts, scale=1)
# plt.plot(t_hist, w_base_hist[:, 0])
# plt.show()
# plt.plot(t_hist, w_base_hist[:, 1])
# plt.show()
# plt.plot(t_hist, w_base_hist[:, 2])
# plt.show()
#
# # simulate with no disturbances
# all_states_cl, all_inputs_cl = drrrtstar_with_nmpc(nmpc_horizon, x_ref_hist, u_ref_hist, n, m, num_steps, v_max, omega_max, x_max,
# y_max, theta_max, w=[],
# save_plot=False, save_file_name=input_file)
# animate(t_hist, all_states_cl, all_inputs_cl, x_ref_hist, u_ref_hist,
# title='NMPC, Closed-loop, reference',
# fig_offset=(1000, 400),
# axis_limits=ax_lim, robot_w=robot_w, robot_h=robot_h, wheel_w=wheel_w, wheel_h=wheel_h)
#
#
# # simulate with disturbance
# all_states_cl_dist, all_inputs_cl_dist = drrrtstar_with_nmpc(nmpc_horizon, x_ref_hist, u_ref_hist, n, m, num_steps, v_max,
# omega_max, x_max, y_max, theta_max, w=w_base_hist,
# save_plot=False,
# save_file_name=input_file)
#
# animate(t_hist, all_states_cl_dist, all_inputs_cl_dist, x_ref_hist, u_ref_hist,
#             title='NMPC, Closed-loop, disturbed reference',
# fig_offset=(1000, 400),
# axis_limits=ax_lim, robot_w=robot_w, robot_h=robot_h, wheel_w=wheel_w, wheel_h=wheel_h)
| 45.558103
| 194
| 0.580018
|
32bad00e0f89ef0bba12f5d7d6472b0409166d60
| 20,941
|
py
|
Python
|
ddtrace/contrib/django/patch.py
|
yiweig/dd-trace-py
|
bce68ed584d2c3e4e6607a4f8da632c0ea72760a
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
ddtrace/contrib/django/patch.py
|
yiweig/dd-trace-py
|
bce68ed584d2c3e4e6607a4f8da632c0ea72760a
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
ddtrace/contrib/django/patch.py
|
yiweig/dd-trace-py
|
bce68ed584d2c3e4e6607a4f8da632c0ea72760a
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
"""
The Django patching works as follows:
Django internals are instrumented via normal `patch()`.
`django.apps.registry.Apps.populate` is patched to add instrumentation for any
specific Django apps like Django Rest Framework (DRF).
"""
from inspect import getmro
from inspect import isclass
from inspect import isfunction
import sys
from ddtrace import Pin
from ddtrace import config
from ddtrace.constants import SPAN_MEASURED_KEY
from ddtrace.contrib import dbapi
from ddtrace.contrib import func_name
try:
from psycopg2._psycopg import cursor as psycopg_cursor_cls
from ddtrace.contrib.psycopg.patch import Psycopg2TracedCursor
except ImportError:
psycopg_cursor_cls = None
Psycopg2TracedCursor = None
from ddtrace.ext import SpanTypes
from ddtrace.ext import http
from ddtrace.ext import sql as sqlx
from ddtrace.internal.compat import maybe_stringify
from ddtrace.internal.logger import get_logger
from ddtrace.utils.formats import asbool
from ddtrace.utils.formats import get_env
from ddtrace.vendor import wrapt
from . import utils
from .. import trace_utils
log = get_logger(__name__)
config._add(
"django",
dict(
_default_service="django",
cache_service_name=get_env("django", "cache_service_name") or "django",
database_service_name_prefix=get_env("django", "database_service_name_prefix", default=""),
database_service_name=get_env("django", "database_service_name", default=""),
trace_fetch_methods=asbool(get_env("django", "trace_fetch_methods", default=False)),
distributed_tracing_enabled=True,
instrument_middleware=asbool(get_env("django", "instrument_middleware", default=True)),
instrument_databases=True,
instrument_caches=True,
analytics_enabled=None, # None allows the value to be overridden by the global config
analytics_sample_rate=None,
trace_query_string=None, # Default to global config
include_user_name=True,
use_handler_resource_format=asbool(get_env("django", "use_handler_resource_format", default=False)),
use_legacy_resource_format=asbool(get_env("django", "use_legacy_resource_format", default=False)),
),
)
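# Note (added commentary, not part of the original file): the get_env("django",
# "<name>") calls above suggest these settings can also be supplied through
# environment variables following ddtrace's usual DD_DJANGO_<NAME> naming
# (e.g. a cache service name via DD_DJANGO_CACHE_SERVICE_NAME), or overridden
# programmatically, e.g.:
#
#     from ddtrace import config
#     config.django["cache_service_name"] = "my-django-cache"
#
# The exact environment-variable names depend on the ddtrace version in use.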
def patch_conn(django, conn):
def cursor(django, pin, func, instance, args, kwargs):
alias = getattr(conn, "alias", "default")
if config.django.database_service_name:
service = config.django.database_service_name
else:
database_prefix = config.django.database_service_name_prefix
service = "{}{}{}".format(database_prefix, alias, "db")
vendor = getattr(conn, "vendor", "db")
prefix = sqlx.normalize_vendor(vendor)
tags = {
"django.db.vendor": vendor,
"django.db.alias": alias,
}
pin = Pin(service, tags=tags, tracer=pin.tracer, app=prefix)
cursor = func(*args, **kwargs)
traced_cursor_cls = dbapi.TracedCursor
if (
Psycopg2TracedCursor is not None
and hasattr(cursor, "cursor")
and isinstance(cursor.cursor, psycopg_cursor_cls)
):
traced_cursor_cls = Psycopg2TracedCursor
return traced_cursor_cls(cursor, pin, config.django)
if not isinstance(conn.cursor, wrapt.ObjectProxy):
conn.cursor = wrapt.FunctionWrapper(conn.cursor, trace_utils.with_traced_module(cursor)(django))
def instrument_dbs(django):
def get_connection(wrapped, instance, args, kwargs):
conn = wrapped(*args, **kwargs)
try:
patch_conn(django, conn)
except Exception:
log.debug("Error instrumenting database connection %r", conn, exc_info=True)
return conn
if not isinstance(django.db.utils.ConnectionHandler.__getitem__, wrapt.ObjectProxy):
django.db.utils.ConnectionHandler.__getitem__ = wrapt.FunctionWrapper(
django.db.utils.ConnectionHandler.__getitem__, get_connection
)
@trace_utils.with_traced_module
def traced_cache(django, pin, func, instance, args, kwargs):
if not config.django.instrument_caches:
return func(*args, **kwargs)
# get the original function method
with pin.tracer.trace("django.cache", span_type=SpanTypes.CACHE, service=config.django.cache_service_name) as span:
# update the resource name and tag the cache backend
span.resource = utils.resource_from_cache_prefix(func_name(func), instance)
cache_backend = "{}.{}".format(instance.__module__, instance.__class__.__name__)
span._set_str_tag("django.cache.backend", cache_backend)
if args:
keys = utils.quantize_key_values(args[0])
span._set_str_tag("django.cache.key", str(keys))
return func(*args, **kwargs)
def instrument_caches(django):
cache_backends = set([cache["BACKEND"] for cache in django.conf.settings.CACHES.values()])
for cache_path in cache_backends:
split = cache_path.split(".")
cache_module = ".".join(split[:-1])
cache_cls = split[-1]
for method in ["get", "set", "add", "delete", "incr", "decr", "get_many", "set_many", "delete_many"]:
try:
cls = django.utils.module_loading.import_string(cache_path)
# DEV: this can be removed when we add an idempotent `wrap`
if not trace_utils.iswrapped(cls, method):
trace_utils.wrap(cache_module, "{0}.{1}".format(cache_cls, method), traced_cache(django))
except Exception:
log.debug("Error instrumenting cache %r", cache_path, exc_info=True)
@trace_utils.with_traced_module
def traced_populate(django, pin, func, instance, args, kwargs):
"""django.apps.registry.Apps.populate is the method used to populate all the apps.
It is used as a hook to install instrumentation for 3rd party apps (like DRF).
`populate()` works in 3 phases:
- Phase 1: Initializes the app configs and imports the app modules.
- Phase 2: Imports models modules for each app.
- Phase 3: runs ready() of each app config.
If all 3 phases successfully run then `instance.ready` will be `True`.
"""
# populate() can be called multiple times, we don't want to instrument more than once
if instance.ready:
log.debug("Django instrumentation already installed, skipping.")
return func(*args, **kwargs)
ret = func(*args, **kwargs)
if not instance.ready:
log.debug("populate() failed skipping instrumentation.")
return ret
settings = django.conf.settings
# Instrument databases
if config.django.instrument_databases:
try:
instrument_dbs(django)
except Exception:
log.debug("Error instrumenting Django database connections", exc_info=True)
# Instrument caches
if config.django.instrument_caches:
try:
instrument_caches(django)
except Exception:
log.debug("Error instrumenting Django caches", exc_info=True)
# Instrument Django Rest Framework if it's installed
INSTALLED_APPS = getattr(settings, "INSTALLED_APPS", [])
if "rest_framework" in INSTALLED_APPS:
try:
from .restframework import patch_restframework
patch_restframework(django)
except Exception:
log.debug("Error patching rest_framework", exc_info=True)
return ret
def traced_func(django, name, resource=None, ignored_excs=None):
"""Returns a function to trace Django functions."""
def wrapped(django, pin, func, instance, args, kwargs):
with pin.tracer.trace(name, resource=resource) as s:
if ignored_excs:
for exc in ignored_excs:
s._ignore_exception(exc)
return func(*args, **kwargs)
return trace_utils.with_traced_module(wrapped)(django)
def traced_process_exception(django, name, resource=None):
def wrapped(django, pin, func, instance, args, kwargs):
with pin.tracer.trace(name, resource=resource) as span:
resp = func(*args, **kwargs)
# If the response code is erroneous then grab the traceback
# and set an error.
if hasattr(resp, "status_code") and 500 <= resp.status_code < 600:
span.set_traceback()
return resp
return trace_utils.with_traced_module(wrapped)(django)
@trace_utils.with_traced_module
def traced_load_middleware(django, pin, func, instance, args, kwargs):
"""Patches django.core.handlers.base.BaseHandler.load_middleware to instrument all middlewares."""
settings_middleware = []
# Gather all the middleware
if getattr(django.conf.settings, "MIDDLEWARE", None):
settings_middleware += django.conf.settings.MIDDLEWARE
if getattr(django.conf.settings, "MIDDLEWARE_CLASSES", None):
settings_middleware += django.conf.settings.MIDDLEWARE_CLASSES
# Iterate over each middleware provided in settings.py
# Each middleware can either be a function or a class
for mw_path in settings_middleware:
mw = django.utils.module_loading.import_string(mw_path)
# Instrument function-based middleware
if isfunction(mw) and not trace_utils.iswrapped(mw):
split = mw_path.split(".")
if len(split) < 2:
continue
base = ".".join(split[:-1])
attr = split[-1]
# Function-based middleware is a factory which returns a handler function for requests.
# So instead of tracing the factory, we want to trace its returned value.
# We wrap the factory to return a traced version of the handler function.
def wrapped_factory(func, instance, args, kwargs):
# r is the middleware handler function returned from the factory
r = func(*args, **kwargs)
if r:
return wrapt.FunctionWrapper(r, traced_func(django, "django.middleware", resource=mw_path))
# If r is an empty middleware function (i.e. returns None), don't wrap since NoneType cannot be called
else:
return r
trace_utils.wrap(base, attr, wrapped_factory)
# Instrument class-based middleware
elif isclass(mw):
for hook in [
"process_request",
"process_response",
"process_view",
"process_template_response",
"__call__",
]:
if hasattr(mw, hook) and not trace_utils.iswrapped(mw, hook):
trace_utils.wrap(
mw, hook, traced_func(django, "django.middleware", resource=mw_path + ".{0}".format(hook))
)
# Do a little extra for `process_exception`
if hasattr(mw, "process_exception") and not trace_utils.iswrapped(mw, "process_exception"):
res = mw_path + ".{0}".format("process_exception")
trace_utils.wrap(
mw, "process_exception", traced_process_exception(django, "django.middleware", resource=res)
)
return func(*args, **kwargs)
@trace_utils.with_traced_module
def traced_get_response(django, pin, func, instance, args, kwargs):
"""Trace django.core.handlers.base.BaseHandler.get_response() (or other implementations).
This is the main entry point for requests.
Django requests are handled by a Handler.get_response method (inherited from base.BaseHandler).
This method invokes the middleware chain and returns the response generated by the chain.
"""
request = kwargs.get("request", args[0])
if request is None:
return func(*args, **kwargs)
trace_utils.activate_distributed_headers(pin.tracer, int_config=config.django, request_headers=request.META)
with pin.tracer.trace(
"django.request",
resource=request.method,
service=trace_utils.int_service(pin, config.django),
span_type=SpanTypes.WEB,
) as span:
utils._before_request_tags(pin, span, request)
span.metrics[SPAN_MEASURED_KEY] = 1
response = None
try:
response = func(*args, **kwargs)
return response
finally:
# DEV: Always set these tags, this is where `span.resource` is set
utils._after_request_tags(pin, span, request, response)
@trace_utils.with_traced_module
def traced_template_render(django, pin, wrapped, instance, args, kwargs):
"""Instrument django.template.base.Template.render for tracing template rendering."""
template_name = maybe_stringify(getattr(instance, "name", None))
if template_name:
resource = template_name
else:
resource = "{0}.{1}".format(func_name(instance), wrapped.__name__)
with pin.tracer.trace("django.template.render", resource=resource, span_type=http.TEMPLATE) as span:
if template_name:
span._set_str_tag("django.template.name", template_name)
engine = getattr(instance, "engine", None)
if engine:
span._set_str_tag("django.template.engine.class", func_name(engine))
return wrapped(*args, **kwargs)
def instrument_view(django, view):
"""
Helper to wrap Django views.
We want to wrap all lifecycle/http method functions for every class in the MRO for this view
"""
if hasattr(view, "__mro__"):
for cls in reversed(getmro(view)):
_instrument_view(django, cls)
return _instrument_view(django, view)
def _instrument_view(django, view):
"""Helper to wrap Django views."""
# All views should be callable, double check before doing anything
if not callable(view):
return view
# Patch view HTTP methods and lifecycle methods
http_method_names = getattr(view, "http_method_names", ("get", "delete", "post", "options", "head"))
lifecycle_methods = ("setup", "dispatch", "http_method_not_allowed")
for name in list(http_method_names) + list(lifecycle_methods):
try:
func = getattr(view, name, None)
if not func or isinstance(func, wrapt.ObjectProxy):
continue
resource = "{0}.{1}".format(func_name(view), name)
op_name = "django.view.{0}".format(name)
trace_utils.wrap(view, name, traced_func(django, name=op_name, resource=resource))
except Exception:
log.debug("Failed to instrument Django view %r function %s", view, name, exc_info=True)
# Patch response methods
response_cls = getattr(view, "response_class", None)
if response_cls:
methods = ("render",)
for name in methods:
try:
func = getattr(response_cls, name, None)
# Do not wrap if the method does not exist or is already wrapped
if not func or isinstance(func, wrapt.ObjectProxy):
continue
resource = "{0}.{1}".format(func_name(response_cls), name)
op_name = "django.response.{0}".format(name)
trace_utils.wrap(response_cls, name, traced_func(django, name=op_name, resource=resource))
except Exception:
log.debug("Failed to instrument Django response %r function %s", response_cls, name, exc_info=True)
# If the view itself is not wrapped, wrap it
if not isinstance(view, wrapt.ObjectProxy):
view = wrapt.FunctionWrapper(
view, traced_func(django, "django.view", resource=func_name(view), ignored_excs=[django.http.Http404])
)
return view
@trace_utils.with_traced_module
def traced_urls_path(django, pin, wrapped, instance, args, kwargs):
"""Wrapper for url path helpers to ensure all views registered as urls are traced."""
try:
if "view" in kwargs:
kwargs["view"] = instrument_view(django, kwargs["view"])
elif len(args) >= 2:
args = list(args)
args[1] = instrument_view(django, args[1])
args = tuple(args)
except Exception:
log.debug("Failed to instrument Django url path %r %r", args, kwargs, exc_info=True)
return wrapped(*args, **kwargs)
@trace_utils.with_traced_module
def traced_as_view(django, pin, func, instance, args, kwargs):
"""
Wrapper for django's View.as_view class method
"""
try:
instrument_view(django, instance)
except Exception:
log.debug("Failed to instrument Django view %r", instance, exc_info=True)
view = func(*args, **kwargs)
return wrapt.FunctionWrapper(view, traced_func(django, "django.view", resource=func_name(view)))
@trace_utils.with_traced_module
def traced_get_asgi_application(django, pin, func, instance, args, kwargs):
from ddtrace.contrib.asgi import TraceMiddleware
def django_asgi_modifier(span, scope):
span.name = "django.request"
return TraceMiddleware(func(*args, **kwargs), integration_config=config.django, span_modifier=django_asgi_modifier)
def _patch(django):
Pin().onto(django)
trace_utils.wrap(django, "apps.registry.Apps.populate", traced_populate(django))
# DEV: this check will be replaced with import hooks in the future
if "django.core.handlers.base" not in sys.modules:
import django.core.handlers.base
if config.django.instrument_middleware:
trace_utils.wrap(django, "core.handlers.base.BaseHandler.load_middleware", traced_load_middleware(django))
trace_utils.wrap(django, "core.handlers.base.BaseHandler.get_response", traced_get_response(django))
if hasattr(django.core.handlers.base.BaseHandler, "get_response_async"):
# Have to inline this import as the module contains syntax incompatible with Python 3.5 and below
from ._asgi import traced_get_response_async
trace_utils.wrap(django, "core.handlers.base.BaseHandler.get_response_async", traced_get_response_async(django))
# Only wrap get_asgi_application if get_response_async exists. Otherwise we will effectively double-patch
# because get_response and get_asgi_application will be used.
if "django.core.asgi" not in sys.modules:
try:
import django.core.asgi
except ImportError:
pass
else:
trace_utils.wrap(django, "core.asgi.get_asgi_application", traced_get_asgi_application(django))
# DEV: this check will be replaced with import hooks in the future
if "django.template.base" not in sys.modules:
import django.template.base
trace_utils.wrap(django, "template.base.Template.render", traced_template_render(django))
# DEV: this check will be replaced with import hooks in the future
if "django.conf.urls.static" not in sys.modules:
import django.conf.urls.static
trace_utils.wrap(django, "conf.urls.url", traced_urls_path(django))
if django.VERSION >= (2, 0, 0):
trace_utils.wrap(django, "urls.path", traced_urls_path(django))
trace_utils.wrap(django, "urls.re_path", traced_urls_path(django))
# DEV: this check will be replaced with import hooks in the future
if "django.views.generic.base" not in sys.modules:
import django.views.generic.base
trace_utils.wrap(django, "views.generic.base.View.as_view", traced_as_view(django))
def patch():
# DEV: this import will eventually be replaced with the module given from an import hook
import django
if django.VERSION < (1, 10, 0):
utils.Resolver404 = django.core.urlresolvers.Resolver404
else:
utils.Resolver404 = django.urls.exceptions.Resolver404
utils.DJANGO22 = django.VERSION >= (2, 2, 0)
if getattr(django, "_datadog_patch", False):
return
_patch(django)
setattr(django, "_datadog_patch", True)
def _unpatch(django):
trace_utils.unwrap(django.apps.registry.Apps, "populate")
trace_utils.unwrap(django.core.handlers.base.BaseHandler, "load_middleware")
trace_utils.unwrap(django.core.handlers.base.BaseHandler, "get_response")
trace_utils.unwrap(django.core.handlers.base.BaseHandler, "get_response_async")
trace_utils.unwrap(django.template.base.Template, "render")
trace_utils.unwrap(django.conf.urls.static, "static")
trace_utils.unwrap(django.conf.urls, "url")
if django.VERSION >= (2, 0, 0):
trace_utils.unwrap(django.urls, "path")
trace_utils.unwrap(django.urls, "re_path")
trace_utils.unwrap(django.views.generic.base.View, "as_view")
for conn in django.db.connections.all():
trace_utils.unwrap(conn, "cursor")
trace_utils.unwrap(django.db.utils.ConnectionHandler, "__getitem__")
def unpatch():
import django
if not getattr(django, "_datadog_patch", False):
return
_unpatch(django)
setattr(django, "_datadog_patch", False)
| 39.436911
| 120
| 0.673177
|
fd0955fc83ed137615937c5cc45563d9cfbb7950
| 2,100
|
py
|
Python
|
aroundme/__main__.py
|
Rishabh570/what-is-around
|
fe7ff8e1c571b4501f5286ae84491d6a5190bf9b
|
[
"MIT"
] | null | null | null |
aroundme/__main__.py
|
Rishabh570/what-is-around
|
fe7ff8e1c571b4501f5286ae84491d6a5190bf9b
|
[
"MIT"
] | null | null | null |
aroundme/__main__.py
|
Rishabh570/what-is-around
|
fe7ff8e1c571b4501f5286ae84491d6a5190bf9b
|
[
"MIT"
] | null | null | null |
import sys
import argparse
import requests
import googlemaps
# This class provides color options
class bcolors:
HEADER = '\033[95m'
PURPLE = '\033[95m'
CYAN = '\033[96m'
DARKCYAN = '\033[36m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
# Utility helper: pretty-prints the places returned by the search to the user
def show(data, query):
print("\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/")
print('\n', bcolors.OKBLUE, bcolors.BOLD, bcolors.HEADER, "Here's a list of {} near you".format(query), bcolors.ENDC, '\n')
print("\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/")
print('\n')
for item in data:
print(' - ', bcolors.BOLD, item['name'], bcolors.ENDC, '\n')
print(bcolors.OKGREEN, ' * ', bcolors.ENDC,
bcolors.DARKCYAN ,'Address: ' , bcolors.ENDC,
item['formatted_address'], '\n',
bcolors.OKGREEN, ' * ', bcolors.ENDC,
bcolors.DARKCYAN ,'Rating: ' , bcolors.ENDC,
item['rating'], '\n',
)
return 'Hope to see you again :)'
# Gets an approximate location via ipstack (note: the request below looks up a hard-coded IP address, not the caller's own)
def get_loc():
response = requests.get('http://api.ipstack.com/103.87.58.6?access_key=f42c4374d0ef9c61749c6c0e1671f200').json()
lat = response['latitude']
long = response['longitude']
return lat, long
# Searches places of particular type near user's current location
def get_places(placeType):
lat, long = get_loc()
gmaps = googlemaps.Client(key='AIzaSyBmm69zn5d5KfDNAct9DHTVs9CR0EKq_Ro')
response = gmaps.places(query=placeType ,location=(lat,long))
return show(response['results'], placeType)
# This is the main function
def main():
    parser = argparse.ArgumentParser(description='Find places around you (defaults to searching for hotels)')
    parser.add_argument('--near', type=str, default='hotel', help='Type of place to look for, e.g. hotel, restaurant, atm')
args = parser.parse_args()
return get_places(args.near)
if __name__ == "__main__":
main()
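# Usage sketch (not part of the original script): since this file is the
# package's __main__ module, it can be run as, for example,
#
#     python -m aroundme --near restaurant
#
# which geolocates via ipstack, queries the Google Places API and prints the
# name, address and rating of each match.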
| 32.8125
| 127
| 0.602381
|
31b132d51fd69fa5ebe4bd128619481b9a005b68
| 1,672
|
py
|
Python
|
zentral/contrib/inventory/events/__init__.py
|
mikemcdonald/zentral
|
4aa03937abfbcea6480aa04bd99f4da7b8dfc923
|
[
"Apache-2.0"
] | null | null | null |
zentral/contrib/inventory/events/__init__.py
|
mikemcdonald/zentral
|
4aa03937abfbcea6480aa04bd99f4da7b8dfc923
|
[
"Apache-2.0"
] | null | null | null |
zentral/contrib/inventory/events/__init__.py
|
mikemcdonald/zentral
|
4aa03937abfbcea6480aa04bd99f4da7b8dfc923
|
[
"Apache-2.0"
] | 1
|
2020-09-09T19:26:04.000Z
|
2020-09-09T19:26:04.000Z
|
import logging
from zentral.core.events import event_cls_from_type, register_event_type
from zentral.core.events.base import BaseEvent, EventMetadata
logger = logging.getLogger('zentral.contrib.inventory.events')
ALL_EVENTS_SEARCH_DICT = {"tag": "inventory_update"}
class InventoryMachineAdded(BaseEvent):
event_type = 'inventory_machine_added'
register_event_type(InventoryMachineAdded)
class InventoryHeartbeat(BaseEvent):
event_type = 'inventory_heartbeat'
tags = ['heartbeat']
register_event_type(InventoryHeartbeat)
# Inventory update events
for attr in ('reference',
'machine',
'link',
'business_unit',
'group',
'os_version',
'system_info',
'network_interface',
'osx_app_instance',
'deb_package',
'teamviewer',
'puppet_node'):
event_type = 'inventory_{}_update'.format(attr)
event_class_name = "".join(s.title() for s in event_type.split('_'))
event_class = type(event_class_name, (BaseEvent,), {'event_type': event_type, 'tags': ['inventory_update']})
register_event_type(event_class)
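# Worked example of the loop above (illustrative comment): for attr ==
# 'os_version' it builds event_type 'inventory_os_version_update', names the
# generated class 'InventoryOsVersionUpdate' and registers it with the
# 'inventory_update' tag, mirroring the hand-written event classes above.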
def post_inventory_events(msn, events):
for index, (event_type, created_at, data) in enumerate(events):
event_cls = event_cls_from_type(event_type)
metadata = EventMetadata(event_cls.event_type,
machine_serial_number=msn,
index=index,
created_at=created_at,
tags=event_cls.tags)
event = event_cls(metadata, data)
event.post()
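# Illustrative sketch (not part of the original module): how a caller might
# feed inventory diffs to post_inventory_events. The serial number and
# payloads below are made-up placeholder values.
def _example_post_inventory_events():
    from datetime import datetime
    events = [
        ('inventory_machine_added', datetime.utcnow(), {'source': 'example'}),
        ('inventory_os_version_update', datetime.utcnow(),
         {'action': 'added', 'os_version': {'name': 'macOS', 'major': 11}}),
    ]
    post_inventory_events('SERIAL-0001', events)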
| 30.4
| 112
| 0.632177
|
5e08b3f133a1c8d5284d233a7632139fde21ff72
| 6,049
|
py
|
Python
|
alipay/aop/api/domain/ContractQueryDTO.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/domain/ContractQueryDTO.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/domain/ContractQueryDTO.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.ContractAttachDTO import ContractAttachDTO
class ContractQueryDTO(object):
def __init__(self):
self._bussiness_number = None
self._contract_attaches = None
self._contract_code = None
self._contract_doc = None
self._contract_name = None
self._imprint_id = None
self._source_system_id = None
self._status = None
self._tenant = None
@property
def bussiness_number(self):
return self._bussiness_number
@bussiness_number.setter
def bussiness_number(self, value):
self._bussiness_number = value
@property
def contract_attaches(self):
return self._contract_attaches
@contract_attaches.setter
def contract_attaches(self, value):
if isinstance(value, list):
self._contract_attaches = list()
for i in value:
if isinstance(i, ContractAttachDTO):
self._contract_attaches.append(i)
else:
self._contract_attaches.append(ContractAttachDTO.from_alipay_dict(i))
@property
def contract_code(self):
return self._contract_code
@contract_code.setter
def contract_code(self, value):
self._contract_code = value
@property
def contract_doc(self):
return self._contract_doc
@contract_doc.setter
def contract_doc(self, value):
if isinstance(value, ContractAttachDTO):
self._contract_doc = value
else:
self._contract_doc = ContractAttachDTO.from_alipay_dict(value)
@property
def contract_name(self):
return self._contract_name
@contract_name.setter
def contract_name(self, value):
self._contract_name = value
@property
def imprint_id(self):
return self._imprint_id
@imprint_id.setter
def imprint_id(self, value):
self._imprint_id = value
@property
def source_system_id(self):
return self._source_system_id
@source_system_id.setter
def source_system_id(self, value):
self._source_system_id = value
@property
def status(self):
return self._status
@status.setter
def status(self, value):
self._status = value
@property
def tenant(self):
return self._tenant
@tenant.setter
def tenant(self, value):
self._tenant = value
def to_alipay_dict(self):
params = dict()
if self.bussiness_number:
if hasattr(self.bussiness_number, 'to_alipay_dict'):
params['bussiness_number'] = self.bussiness_number.to_alipay_dict()
else:
params['bussiness_number'] = self.bussiness_number
if self.contract_attaches:
if isinstance(self.contract_attaches, list):
for i in range(0, len(self.contract_attaches)):
element = self.contract_attaches[i]
if hasattr(element, 'to_alipay_dict'):
self.contract_attaches[i] = element.to_alipay_dict()
if hasattr(self.contract_attaches, 'to_alipay_dict'):
params['contract_attaches'] = self.contract_attaches.to_alipay_dict()
else:
params['contract_attaches'] = self.contract_attaches
if self.contract_code:
if hasattr(self.contract_code, 'to_alipay_dict'):
params['contract_code'] = self.contract_code.to_alipay_dict()
else:
params['contract_code'] = self.contract_code
if self.contract_doc:
if hasattr(self.contract_doc, 'to_alipay_dict'):
params['contract_doc'] = self.contract_doc.to_alipay_dict()
else:
params['contract_doc'] = self.contract_doc
if self.contract_name:
if hasattr(self.contract_name, 'to_alipay_dict'):
params['contract_name'] = self.contract_name.to_alipay_dict()
else:
params['contract_name'] = self.contract_name
if self.imprint_id:
if hasattr(self.imprint_id, 'to_alipay_dict'):
params['imprint_id'] = self.imprint_id.to_alipay_dict()
else:
params['imprint_id'] = self.imprint_id
if self.source_system_id:
if hasattr(self.source_system_id, 'to_alipay_dict'):
params['source_system_id'] = self.source_system_id.to_alipay_dict()
else:
params['source_system_id'] = self.source_system_id
if self.status:
if hasattr(self.status, 'to_alipay_dict'):
params['status'] = self.status.to_alipay_dict()
else:
params['status'] = self.status
if self.tenant:
if hasattr(self.tenant, 'to_alipay_dict'):
params['tenant'] = self.tenant.to_alipay_dict()
else:
params['tenant'] = self.tenant
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = ContractQueryDTO()
if 'bussiness_number' in d:
o.bussiness_number = d['bussiness_number']
if 'contract_attaches' in d:
o.contract_attaches = d['contract_attaches']
if 'contract_code' in d:
o.contract_code = d['contract_code']
if 'contract_doc' in d:
o.contract_doc = d['contract_doc']
if 'contract_name' in d:
o.contract_name = d['contract_name']
if 'imprint_id' in d:
o.imprint_id = d['imprint_id']
if 'source_system_id' in d:
o.source_system_id = d['source_system_id']
if 'status' in d:
o.status = d['status']
if 'tenant' in d:
o.tenant = d['tenant']
return o
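# Illustrative sketch (not part of the SDK file): building a query DTO and
# round-tripping it through the dict helpers defined above; all field values
# are placeholders.
def _example_contract_query_round_trip():
    dto = ContractQueryDTO()
    dto.contract_code = 'C-0001'
    dto.contract_name = 'Sample contract'
    dto.status = 'VALID'
    params = dto.to_alipay_dict()
    # from_alipay_dict rebuilds an equivalent object from the serialized form
    return ContractQueryDTO.from_alipay_dict(params).contract_code == dto.contract_code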
| 34.175141
| 89
| 0.609688
|
dc2141b9fbd5fe516b4b88200b9b109689a03628
| 37,548
|
py
|
Python
|
ckan/controllers/group.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | 1
|
2022-02-14T20:25:34.000Z
|
2022-02-14T20:25:34.000Z
|
ckan/controllers/group.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | 4
|
2020-03-24T17:53:23.000Z
|
2021-03-31T19:19:03.000Z
|
ckan/controllers/group.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | 3
|
2020-01-02T10:32:37.000Z
|
2021-12-22T07:20:21.000Z
|
# encoding: utf-8
import logging
import datetime
from six.moves.urllib.parse import urlencode
from pylons.i18n import get_lang
from six import string_types, text_type
import ckan.lib.base as base
import ckan.lib.helpers as h
import ckan.lib.navl.dictization_functions as dict_fns
import ckan.logic as logic
import ckan.lib.search as search
import ckan.model as model
import ckan.authz as authz
import ckan.lib.plugins
import ckan.plugins as plugins
from ckan.common import OrderedDict, c, config, request, _
log = logging.getLogger(__name__)
render = base.render
abort = base.abort
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
ValidationError = logic.ValidationError
check_access = logic.check_access
get_action = logic.get_action
tuplize_dict = logic.tuplize_dict
clean_dict = logic.clean_dict
parse_params = logic.parse_params
lookup_group_plugin = ckan.lib.plugins.lookup_group_plugin
lookup_group_controller = ckan.lib.plugins.lookup_group_controller
class GroupController(base.BaseController):
group_types = ['group']
# hooks for subclasses
def _group_form(self, group_type=None):
return lookup_group_plugin(group_type).group_form()
def _form_to_db_schema(self, group_type=None):
return lookup_group_plugin(group_type).form_to_db_schema()
def _db_to_form_schema(self, group_type=None):
'''This is an interface to manipulate data from the database
into a format suitable for the form (optional)'''
return lookup_group_plugin(group_type).db_to_form_schema()
def _setup_template_variables(self, context, data_dict, group_type=None):
if 'type' not in data_dict:
data_dict['type'] = group_type
return lookup_group_plugin(group_type).\
setup_template_variables(context, data_dict)
def _new_template(self, group_type):
return lookup_group_plugin(group_type).new_template()
def _index_template(self, group_type):
return lookup_group_plugin(group_type).index_template()
def _about_template(self, group_type):
return lookup_group_plugin(group_type).about_template()
def _read_template(self, group_type):
return lookup_group_plugin(group_type).read_template()
def _history_template(self, group_type):
return lookup_group_plugin(group_type).history_template()
def _edit_template(self, group_type):
return lookup_group_plugin(group_type).edit_template()
def _activity_template(self, group_type):
return lookup_group_plugin(group_type).activity_template()
def _admins_template(self, group_type):
return lookup_group_plugin(group_type).admins_template()
def _bulk_process_template(self, group_type):
return lookup_group_plugin(group_type).bulk_process_template()
# end hooks
def _replace_group_org(self, string):
''' substitute organization for group if this is an org'''
return string
def _action(self, action_name):
''' select the correct group/org action '''
return get_action(self._replace_group_org(action_name))
def _check_access(self, action_name, *args, **kw):
''' select the correct group/org check_access '''
return check_access(self._replace_group_org(action_name), *args, **kw)
def _render_template(self, template_name, group_type):
''' render the correct group/org template '''
return render(self._replace_group_org(template_name),
extra_vars={'group_type': group_type})
def _guess_group_type(self, expecting_name=False):
"""
Guess the type of group from the URL.
* The default url '/group/xyz' returns None
* group_type is unicode
* this handles the case where there is a prefix on the URL
(such as /data/organization)
"""
parts = [x for x in request.path.split('/') if x]
idx = -1
if expecting_name:
idx = -2
gt = parts[idx]
return gt
def _ensure_controller_matches_group_type(self, id):
group = model.Group.get(id)
if group is None:
abort(404, _('Group not found'))
if group.type not in self.group_types:
abort(404, _('Incorrect group type'))
return group.type
@classmethod
def add_group_type(cls, group_type):
''' Notify this controller that it is to be used for a particular
group_type. (Called on plugin registration.)
'''
cls.group_types.append(group_type)
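    # Illustrative note (not part of the original controller): a plugin that
    # defines a custom group type would typically call
    #     GroupController.add_group_type('theme')
    # at registration time, after which group URLs for objects of that type are
    # routed to this controller and pass _ensure_controller_matches_group_type().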
def index(self):
group_type = self._guess_group_type()
page = h.get_page_number(request.params) or 1
items_per_page = 21
context = {'model': model, 'session': model.Session,
'user': c.user, 'for_view': True,
'with_private': False}
q = c.q = request.params.get('q', '')
sort_by = c.sort_by_selected = request.params.get('sort')
try:
self._check_access('site_read', context)
self._check_access('group_list', context)
except NotAuthorized:
abort(403, _('Not authorized to see this page'))
# pass user info to context as needed to view private datasets of
# orgs correctly
if c.userobj:
context['user_id'] = c.userobj.id
context['user_is_admin'] = c.userobj.sysadmin
try:
data_dict_global_results = {
'all_fields': False,
'q': q,
'sort': sort_by,
'type': group_type or 'group',
}
global_results = self._action('group_list')(
context, data_dict_global_results)
except ValidationError as e:
if e.error_dict and e.error_dict.get('message'):
msg = e.error_dict['message']
else:
msg = str(e)
h.flash_error(msg)
c.page = h.Page([], 0)
return render(self._index_template(group_type),
extra_vars={'group_type': group_type})
data_dict_page_results = {
'all_fields': True,
'q': q,
'sort': sort_by,
'type': group_type or 'group',
'limit': items_per_page,
'offset': items_per_page * (page - 1),
'include_extras': True
}
page_results = self._action('group_list')(context,
data_dict_page_results)
c.page = h.Page(
collection=global_results,
page=page,
url=h.pager_url,
items_per_page=items_per_page,
)
c.page.items = page_results
return render(self._index_template(group_type),
extra_vars={'group_type': group_type})
def read(self, id, limit=20):
group_type = self._ensure_controller_matches_group_type(
id.split('@')[0])
context = {'model': model, 'session': model.Session,
'user': c.user,
'schema': self._db_to_form_schema(group_type=group_type),
'for_view': True}
data_dict = {'id': id, 'type': group_type}
# unicode format (decoded from utf8)
c.q = request.params.get('q', '')
try:
# Do not query for the group datasets when dictizing, as they will
# be ignored and get requested on the controller anyway
data_dict['include_datasets'] = False
# Do not query group members as they aren't used in the view
data_dict['include_users'] = False
c.group_dict = self._action('group_show')(context, data_dict)
c.group = context['group']
except (NotFound, NotAuthorized):
abort(404, _('Group not found'))
# if the user specified a group id, redirect to the group name
if data_dict['id'] == c.group_dict['id'] and \
data_dict['id'] != c.group_dict['name']:
h.redirect_to(controller=group_type, action='read',
id=c.group_dict['name'])
self._read(id, limit, group_type)
return render(self._read_template(c.group_dict['type']),
extra_vars={'group_type': group_type})
def _read(self, id, limit, group_type):
''' This is common code used by both read and bulk_process'''
context = {'model': model, 'session': model.Session,
'user': c.user,
'schema': self._db_to_form_schema(group_type=group_type),
'for_view': True, 'extras_as_string': True}
q = c.q = request.params.get('q', '')
# Search within group
if c.group_dict.get('is_organization'):
fq = 'owner_org:"%s"' % c.group_dict.get('id')
else:
fq = 'groups:"%s"' % c.group_dict.get('name')
c.description_formatted = \
h.render_markdown(c.group_dict.get('description'))
context['return_query'] = True
page = h.get_page_number(request.params)
# most search operations should reset the page counter:
params_nopage = [(k, v) for k, v in request.params.items()
if k != 'page']
sort_by = request.params.get('sort', None)
def search_url(params):
controller = lookup_group_controller(group_type)
action = 'bulk_process' if c.action == 'bulk_process' else 'read'
url = h.url_for(controller=controller, action=action, id=id)
params = [(k, v.encode('utf-8') if isinstance(v, string_types)
else str(v)) for k, v in params]
return url + u'?' + urlencode(params)
def drill_down_url(**by):
return h.add_url_param(alternative_url=None,
controller='group', action='read',
extras=dict(id=c.group_dict.get('name')),
new_params=by)
c.drill_down_url = drill_down_url
def remove_field(key, value=None, replace=None):
controller = lookup_group_controller(group_type)
return h.remove_url_param(key, value=value, replace=replace,
controller=controller, action='read',
extras=dict(id=c.group_dict.get('name')))
c.remove_field = remove_field
def pager_url(q=None, page=None):
params = list(params_nopage)
params.append(('page', page))
return search_url(params)
try:
c.fields = []
c.fields_grouped = {}
search_extras = {}
for (param, value) in request.params.items():
if param not in ['q', 'page', 'sort'] \
and len(value) and not param.startswith('_'):
if not param.startswith('ext_'):
c.fields.append((param, value))
q += ' %s: "%s"' % (param, value)
if param not in c.fields_grouped:
c.fields_grouped[param] = [value]
else:
c.fields_grouped[param].append(value)
else:
search_extras[param] = value
facets = OrderedDict()
default_facet_titles = {'organization': _('Organizations'),
'groups': _('Groups'),
'tags': _('Tags'),
'res_format': _('Formats'),
'license_id': _('Licenses')}
for facet in h.facets():
if facet in default_facet_titles:
facets[facet] = default_facet_titles[facet]
else:
facets[facet] = facet
# Facet titles
self._update_facet_titles(facets, group_type)
c.facet_titles = facets
data_dict = {
'q': q,
'fq': fq,
'include_private': True,
'facet.field': facets.keys(),
'rows': limit,
'sort': sort_by,
'start': (page - 1) * limit,
'extras': search_extras
}
context_ = dict((k, v) for (k, v) in context.items()
if k != 'schema')
query = get_action('package_search')(context_, data_dict)
c.page = h.Page(
collection=query['results'],
page=page,
url=pager_url,
item_count=query['count'],
items_per_page=limit
)
c.group_dict['package_count'] = query['count']
c.search_facets = query['search_facets']
c.search_facets_limits = {}
for facet in c.search_facets.keys():
limit = int(request.params.get('_%s_limit' % facet,
config.get('search.facets.default', 10)))
c.search_facets_limits[facet] = limit
c.page.items = query['results']
c.sort_by_selected = sort_by
except search.SearchError as se:
log.error('Group search error: %r', se.args)
c.query_error = True
c.page = h.Page(collection=[])
self._setup_template_variables(context, {'id': id},
group_type=group_type)
def _update_facet_titles(self, facets, group_type):
for plugin in plugins.PluginImplementations(plugins.IFacets):
facets = plugin.group_facets(
facets, group_type, None)
def bulk_process(self, id):
''' Allow bulk processing of datasets for an organization. Make
private/public or delete. For organization admins.'''
group_type = self._ensure_controller_matches_group_type(
id.split('@')[0])
# check we are org admin
context = {'model': model, 'session': model.Session,
'user': c.user,
'schema': self._db_to_form_schema(group_type=group_type),
'for_view': True, 'extras_as_string': True}
data_dict = {'id': id, 'type': group_type}
try:
self._check_access('bulk_update_public', context, {'org_id': id})
# Do not query for the group datasets when dictizing, as they will
# be ignored and get requested on the controller anyway
data_dict['include_datasets'] = False
c.group_dict = self._action('group_show')(context, data_dict)
c.group = context['group']
except NotFound:
abort(404, _('Group not found'))
except NotAuthorized:
abort(403, _('User %r not authorized to edit %s') % (c.user, id))
if not c.group_dict['is_organization']:
# FIXME: better error
raise Exception('Must be an organization')
# use different form names so that ie7 can be detected
form_names = set(["bulk_action.public", "bulk_action.delete",
"bulk_action.private"])
actions_in_form = set(request.params.keys())
actions = form_names.intersection(actions_in_form)
# If no action then just show the datasets
if not actions:
# unicode format (decoded from utf8)
limit = 500
self._read(id, limit, group_type)
c.packages = c.page.items
return render(self._bulk_process_template(group_type),
extra_vars={'group_type': group_type})
# ie7 puts all buttons in form params but puts submitted one twice
for key, value in dict(request.params.dict_of_lists()).items():
if len(value) == 2:
action = key.split('.')[-1]
break
else:
# normal good browser form submission
action = actions.pop().split('.')[-1]
# process the action first find the datasets to perform the action on.
# they are prefixed by dataset_ in the form data
datasets = []
for param in request.params:
if param.startswith('dataset_'):
datasets.append(param[8:])
action_functions = {
'private': 'bulk_update_private',
'public': 'bulk_update_public',
'delete': 'bulk_update_delete',
}
data_dict = {'datasets': datasets, 'org_id': c.group_dict['id']}
try:
get_action(action_functions[action])(context, data_dict)
except NotAuthorized:
abort(403, _('Not authorized to perform bulk update'))
h.redirect_to(group_type + '_bulk_process', id=id)
def new(self, data=None, errors=None, error_summary=None):
if data and 'type' in data:
group_type = data['type']
else:
group_type = self._guess_group_type(True)
if data:
data['type'] = group_type
context = {'model': model, 'session': model.Session,
'user': c.user,
'save': 'save' in request.params,
'parent': request.params.get('parent', None)}
try:
self._check_access('group_create', context)
except NotAuthorized:
abort(403, _('Unauthorized to create a group'))
if context['save'] and not data and request.method == 'POST':
return self._save_new(context, group_type)
data = data or {}
if not data.get('image_url', '').startswith('http'):
data.pop('image_url', None)
errors = errors or {}
error_summary = error_summary or {}
vars = {'data': data, 'errors': errors,
'error_summary': error_summary, 'action': 'new',
'group_type': group_type}
self._setup_template_variables(context, data, group_type=group_type)
c.form = render(self._group_form(group_type=group_type),
extra_vars=vars)
return render(self._new_template(group_type),
extra_vars={'group_type': group_type})
def edit(self, id, data=None, errors=None, error_summary=None):
group_type = self._ensure_controller_matches_group_type(
id.split('@')[0])
context = {'model': model, 'session': model.Session,
'user': c.user,
'save': 'save' in request.params,
'for_edit': True,
'parent': request.params.get('parent', None)
}
data_dict = {'id': id, 'include_datasets': False}
if context['save'] and not data and request.method == 'POST':
return self._save_edit(id, context)
try:
data_dict['include_datasets'] = False
old_data = self._action('group_show')(context, data_dict)
c.grouptitle = old_data.get('title')
c.groupname = old_data.get('name')
data = data or old_data
except (NotFound, NotAuthorized):
abort(404, _('Group not found'))
group = context.get("group")
c.group = group
c.group_dict = self._action('group_show')(context, data_dict)
try:
self._check_access('group_update', context)
except NotAuthorized:
abort(403, _('User %r not authorized to edit %s') % (c.user, id))
errors = errors or {}
vars = {'data': data, 'errors': errors,
'error_summary': error_summary, 'action': 'edit',
'group_type': group_type}
self._setup_template_variables(context, data, group_type=group_type)
c.form = render(self._group_form(group_type), extra_vars=vars)
return render(self._edit_template(c.group.type),
extra_vars={'group_type': group_type})
def _save_new(self, context, group_type=None):
try:
data_dict = clean_dict(dict_fns.unflatten(
tuplize_dict(parse_params(request.params))))
data_dict['type'] = group_type or 'group'
context['message'] = data_dict.get('log_message', '')
data_dict['users'] = [{'name': c.user, 'capacity': 'admin'}]
group = self._action('group_create')(context, data_dict)
# Redirect to the appropriate _read route for the type of group
h.redirect_to(group['type'] + '_read', id=group['name'])
except (NotFound, NotAuthorized) as e:
abort(404, _('Group not found'))
except dict_fns.DataError:
abort(400, _(u'Integrity Error'))
except ValidationError as e:
errors = e.error_dict
error_summary = e.error_summary
return self.new(data_dict, errors, error_summary)
def _force_reindex(self, grp):
''' When the group name has changed, we need to force a reindex
of the datasets within the group, otherwise they will stop
appearing on the read page for the group (as they're connected via
the group name)'''
group = model.Group.get(grp['name'])
for dataset in group.packages():
search.rebuild(dataset.name)
def _save_edit(self, id, context):
try:
data_dict = clean_dict(dict_fns.unflatten(
tuplize_dict(parse_params(request.params))))
context['message'] = data_dict.get('log_message', '')
data_dict['id'] = id
context['allow_partial_update'] = True
group = self._action('group_update')(context, data_dict)
if id != group['name']:
self._force_reindex(group)
h.redirect_to('%s_read' % group['type'], id=group['name'])
except (NotFound, NotAuthorized) as e:
abort(404, _('Group not found'))
except dict_fns.DataError:
abort(400, _(u'Integrity Error'))
except ValidationError as e:
errors = e.error_dict
error_summary = e.error_summary
return self.edit(id, data_dict, errors, error_summary)
def authz(self, id):
group = model.Group.get(id)
if group is None:
abort(404, _('Group not found'))
group_type = group.type
if group_type not in self.group_types:
abort(404, _('Incorrect group type'))
c.groupname = group.name
c.grouptitle = group.display_name
try:
context = \
{'model': model, 'user': c.user, 'group': group}
self._check_access('group_edit_permissions', context)
c.authz_editable = True
c.group = context['group']
except NotAuthorized:
c.authz_editable = False
if not c.authz_editable:
abort(403,
_('User %r not authorized to edit %s authorizations') %
(c.user, id))
roles = self._handle_update_of_authz(group)
self._prepare_authz_info_for_render(roles)
return render('group/authz.html',
extra_vars={'group_type': group_type})
def delete(self, id):
group_type = self._ensure_controller_matches_group_type(id)
if 'cancel' in request.params:
h.redirect_to(group_type + '_edit', id=id)
context = {'model': model, 'session': model.Session,
'user': c.user}
try:
self._check_access('group_delete', context, {'id': id})
except NotAuthorized:
abort(403, _('Unauthorized to delete group %s') % '')
try:
if request.method == 'POST':
self._action('group_delete')(context, {'id': id})
if group_type == 'organization':
h.flash_notice(_('Organization has been deleted.'))
elif group_type == 'group':
h.flash_notice(_('Group has been deleted.'))
else:
h.flash_notice(_('%s has been deleted.')
% _(group_type.capitalize()))
h.redirect_to(group_type + '_index')
c.group_dict = self._action('group_show')(context, {'id': id})
except NotAuthorized:
abort(403, _('Unauthorized to delete group %s') % '')
except NotFound:
abort(404, _('Group not found'))
except ValidationError as e:
h.flash_error(e.error_dict['message'])
h.redirect_to(controller='organization', action='read', id=id)
return self._render_template('group/confirm_delete.html', group_type)
def members(self, id):
group_type = self._ensure_controller_matches_group_type(id)
context = {'model': model, 'session': model.Session,
'user': c.user}
data_dict = {'id': id}
try:
check_access('group_edit_permissions', context, data_dict)
except NotAuthorized:
abort(403,
_('User %r not authorized to edit members of %s') % (c.user,
id))
try:
c.members = self._action('member_list')(
context, {'id': id, 'object_type': 'user'}
)
data_dict['include_datasets'] = False
c.group_dict = self._action('group_show')(context, data_dict)
except NotFound:
abort(404, _('Group not found'))
return self._render_template('group/members.html', group_type)
def member_new(self, id):
group_type = self._ensure_controller_matches_group_type(id)
context = {'model': model, 'session': model.Session,
'user': c.user}
try:
self._check_access('group_member_create', context, {'id': id})
except NotAuthorized:
abort(403, _('Unauthorized to create group %s members') % '')
try:
data_dict = {'id': id}
data_dict['include_datasets'] = False
c.group_dict = self._action('group_show')(context, data_dict)
c.roles = self._action('member_roles_list')(
context, {'group_type': group_type}
)
if request.method == 'POST':
data_dict = clean_dict(dict_fns.unflatten(
tuplize_dict(parse_params(request.params))))
data_dict['id'] = id
email = data_dict.get('email')
if email:
user_data_dict = {
'email': email,
'group_id': data_dict['id'],
'role': data_dict['role']
}
del data_dict['email']
user_dict = self._action('user_invite')(
context, user_data_dict)
data_dict['username'] = user_dict['name']
c.group_dict = self._action('group_member_create')(
context, data_dict)
h.redirect_to(group_type + '_members', id=id)
else:
user = request.params.get('user')
if user:
c.user_dict = \
get_action('user_show')(context, {'id': user})
c.user_role = \
authz.users_role_for_group_or_org(id, user) or 'member'
else:
c.user_role = 'member'
except NotAuthorized:
abort(403, _('Unauthorized to add member to group %s') % '')
except NotFound:
abort(404, _('Group not found'))
except ValidationError as e:
h.flash_error(e.error_summary)
return self._render_template('group/member_new.html', group_type)
def member_delete(self, id):
group_type = self._ensure_controller_matches_group_type(id)
if 'cancel' in request.params:
h.redirect_to(group_type + '_members', id=id)
context = {'model': model, 'session': model.Session,
'user': c.user}
try:
self._check_access('group_member_delete', context, {'id': id})
except NotAuthorized:
abort(403, _('Unauthorized to delete group %s members') % '')
try:
user_id = request.params.get('user')
if request.method == 'POST':
self._action('group_member_delete')(
context, {'id': id, 'user_id': user_id})
h.flash_notice(_('Group member has been deleted.'))
h.redirect_to(group_type + '_members', id=id)
c.user_dict = self._action('user_show')(context, {'id': user_id})
c.user_id = user_id
c.group_id = id
except NotAuthorized:
abort(403, _('Unauthorized to delete group %s members') % '')
except NotFound:
abort(404, _('Group not found'))
return self._render_template('group/confirm_delete_member.html',
group_type)
def history(self, id):
group_type = self._ensure_controller_matches_group_type(id)
if 'diff' in request.params or 'selected1' in request.params:
try:
params = {'id': request.params.getone('group_name'),
'diff': request.params.getone('selected1'),
'oldid': request.params.getone('selected2'),
}
except KeyError:
if 'group_name' in dict(request.params):
id = request.params.getone('group_name')
c.error = \
_('Select two revisions before doing the comparison.')
else:
params['diff_entity'] = 'group'
h.redirect_to(controller='revision', action='diff', **params)
context = {'model': model, 'session': model.Session,
'user': c.user,
'schema': self._db_to_form_schema()}
data_dict = {'id': id}
try:
c.group_dict = self._action('group_show')(context, data_dict)
c.group_revisions = self._action('group_revision_list')(context,
data_dict)
# TODO: remove
# Still necessary for the authz check in group/layout.html
c.group = context['group']
except (NotFound, NotAuthorized):
abort(404, _('Group not found'))
format = request.params.get('format', '')
if format == 'atom':
# Generate and return Atom 1.0 document.
from webhelpers.feedgenerator import Atom1Feed
feed = Atom1Feed(
title=_(u'CKAN Group Revision History'),
link=h.url_for(
group_type + '_read',
id=c.group_dict['name']),
description=_(u'Recent changes to CKAN Group: ') +
c.group_dict['display_name'],
language=text_type(get_lang()),
)
for revision_dict in c.group_revisions:
revision_date = h.date_str_to_datetime(
revision_dict['timestamp'])
try:
dayHorizon = int(request.params.get('days'))
except ValueError:
dayHorizon = 30
dayAge = (datetime.datetime.now() - revision_date).days
if dayAge >= dayHorizon:
break
if revision_dict['message']:
item_title = u'%s' % revision_dict['message'].\
split('\n')[0]
else:
item_title = u'%s' % revision_dict['id']
item_link = h.url_for(controller='revision', action='read',
id=revision_dict['id'])
item_description = _('Log message: ')
item_description += '%s' % (revision_dict['message'] or '')
item_author_name = revision_dict['author']
item_pubdate = revision_date
feed.add_item(
title=item_title,
link=item_link,
description=item_description,
author_name=item_author_name,
pubdate=item_pubdate,
)
feed.content_type = 'application/atom+xml'
return feed.writeString('utf-8')
return render(self._history_template(group_type),
extra_vars={'group_type': group_type})
def activity(self, id, offset=0):
'''Render this group's public activity stream page.'''
group_type = self._ensure_controller_matches_group_type(id)
context = {'model': model, 'session': model.Session,
'user': c.user, 'for_view': True}
try:
c.group_dict = self._get_group_dict(id)
except (NotFound, NotAuthorized):
abort(404, _('Group not found'))
try:
# Add the group's activity stream (already rendered to HTML) to the
# template context for the group/read.html
# template to retrieve later.
c.group_activity_stream = self._action('group_activity_list_html')(
context, {'id': c.group_dict['id'], 'offset': offset})
except ValidationError as error:
base.abort(400)
return render(self._activity_template(group_type),
extra_vars={'group_type': group_type})
def follow(self, id):
'''Start following this group.'''
self._ensure_controller_matches_group_type(id)
context = {'model': model,
'session': model.Session,
'user': c.user}
data_dict = {'id': id}
try:
get_action('follow_group')(context, data_dict)
group_dict = get_action('group_show')(context, data_dict)
h.flash_success(_("You are now following {0}").format(
group_dict['title']))
id = group_dict['name']
except ValidationError as e:
error_message = (e.message or e.error_summary
or e.error_dict)
h.flash_error(error_message)
except NotAuthorized as e:
h.flash_error(e.message)
h.redirect_to(controller='group', action='read', id=id)
def unfollow(self, id):
'''Stop following this group.'''
self._ensure_controller_matches_group_type(id)
context = {'model': model,
'session': model.Session,
'user': c.user}
data_dict = {'id': id}
try:
get_action('unfollow_group')(context, data_dict)
group_dict = get_action('group_show')(context, data_dict)
h.flash_success(_("You are no longer following {0}").format(
group_dict['title']))
id = group_dict['name']
except ValidationError as e:
error_message = (e.message or e.error_summary
or e.error_dict)
h.flash_error(error_message)
except (NotFound, NotAuthorized) as e:
error_message = e.message
h.flash_error(error_message)
h.redirect_to(controller='group', action='read', id=id)
def followers(self, id):
group_type = self._ensure_controller_matches_group_type(id)
context = {'model': model, 'session': model.Session,
'user': c.user}
c.group_dict = self._get_group_dict(id)
try:
c.followers = \
get_action('group_follower_list')(context, {'id': id})
except NotAuthorized:
abort(403, _('Unauthorized to view followers %s') % '')
return render('group/followers.html',
extra_vars={'group_type': group_type})
def admins(self, id):
group_type = self._ensure_controller_matches_group_type(id)
c.group_dict = self._get_group_dict(id)
c.admins = authz.get_group_or_org_admin_ids(id)
return render(self._admins_template(c.group_dict['type']),
extra_vars={'group_type': group_type})
def about(self, id):
group_type = self._ensure_controller_matches_group_type(id)
context = {'model': model, 'session': model.Session,
'user': c.user}
c.group_dict = self._get_group_dict(id)
group_type = c.group_dict['type']
self._setup_template_variables(context, {'id': id},
group_type=group_type)
return render(self._about_template(group_type),
extra_vars={'group_type': group_type})
def _get_group_dict(self, id):
''' returns the result of group_show action or aborts if there is a
problem '''
context = {'model': model, 'session': model.Session,
'user': c.user,
'for_view': True}
try:
return self._action('group_show')(
context, {'id': id, 'include_datasets': False})
except (NotFound, NotAuthorized):
abort(404, _('Group not found'))
| 39.691332
| 79
| 0.558192
|
c3282225cb57a4bff30b28c4ebcfc8d617f2b650
| 25,175
|
py
|
Python
|
wagtailmodeladmin/options.py
|
rkhleics/wagtailmodeladmin
|
7fddc853bab2ff3868b8c7a03329308c55f16358
|
[
"MIT"
] | 77
|
2015-12-23T17:01:37.000Z
|
2021-06-02T19:04:50.000Z
|
wagtailmodeladmin/options.py
|
ababic/wagtailmodeladmin
|
7fddc853bab2ff3868b8c7a03329308c55f16358
|
[
"MIT"
] | 27
|
2015-08-02T13:59:41.000Z
|
2015-12-21T11:22:52.000Z
|
wagtailmodeladmin/options.py
|
rkhleics/wagtailmodeladmin
|
7fddc853bab2ff3868b8c7a03329308c55f16358
|
[
"MIT"
] | 17
|
2015-12-29T12:34:49.000Z
|
2019-11-27T13:20:10.000Z
|
import warnings
from django.contrib.auth.models import Permission
from django.conf.urls import url
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Model
from django.forms.widgets import flatatt
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.models import Filter
from wagtail.wagtailcore import hooks
from .menus import ModelAdminMenuItem, GroupMenuItem, SubMenu
from .helpers import (
PermissionHelper, PagePermissionHelper, ButtonHelper, PageButtonHelper,
get_url_pattern, get_object_specific_url_pattern, get_url_name)
from .views import (
IndexView, InspectView, CreateView, ChooseParentView, EditView,
ConfirmDeleteView, CopyRedirectView, UnpublishRedirectView)
class WagtailRegisterable(object):
"""
Base class, providing a more convenient way for ModelAdmin or
ModelAdminGroup instances to be registered with Wagtail's admin area.
"""
add_to_settings_menu = False
def register_with_wagtail(self):
@hooks.register('register_permissions')
def register_permissions():
return self.get_permissions_for_registration()
@hooks.register('register_admin_urls')
def register_admin_urls():
return self.get_admin_urls_for_registration()
menu_hook = (
'register_settings_menu_item' if self.add_to_settings_menu else
'register_admin_menu_item'
)
@hooks.register(menu_hook)
def register_admin_menu_item():
return self.get_menu_item()
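# Illustrative sketch (comment only, not part of the original module): a
# concrete ModelAdmin subclass is registered by instantiating it and calling
# the register_with_wagtail() method defined above, e.g. from a project's
# wagtail_hooks.py (Event/EventAdmin are hypothetical names):
#
#     class EventAdmin(ModelAdmin):
#         model = Event
#
#     EventAdmin().register_with_wagtail()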
class ThumbmnailMixin(object):
"""
Mixin class to help display thumbnail images in ModelAdmin listing results.
`thumb_image_field_name` must be overridden to name a ForeignKey field on
your model, linking to `wagtailimages.Image`.
"""
thumb_image_field_name = 'image'
thumb_image_filter_spec = 'fill-100x100'
thumb_image_width = 50
thumb_classname = 'admin-thumb'
thumb_col_header_text = _('image')
thumb_default = None
def admin_thumb(self, obj):
try:
image = getattr(obj, self.thumb_image_field_name, None)
except AttributeError:
raise ImproperlyConfigured(
u"The `thumb_image_field_name` attribute on your `%s` class "
"must name a field on your model." % self.__class__.__name__
)
img_attrs = {
'src': self.thumb_default,
'width': self.thumb_image_width,
'class': self.thumb_classname,
}
if image:
fltr, _ = Filter.objects.get_or_create(
spec=self.thumb_image_filter_spec)
img_attrs.update({'src': image.get_rendition(fltr).url})
return mark_safe('<img{}>'.format(flatatt(img_attrs)))
elif self.thumb_default:
return mark_safe('<img{}>'.format(flatatt(img_attrs)))
return ''
admin_thumb.short_description = thumb_col_header_text
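# Illustrative sketch (comment only): combining the mixin above with ModelAdmin,
# assuming a hypothetical Book model whose `cover_image` field is a ForeignKey
# to wagtailimages.Image:
#
#     class BookAdmin(ThumbmnailMixin, ModelAdmin):
#         model = Book
#         thumb_image_field_name = 'cover_image'
#         list_display = ('title', 'admin_thumb')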
class ModelAdmin(WagtailRegisterable):
"""
The core wagtailmodeladmin class. It provides an alternative means to
list and manage instances of a given 'model' within Wagtail's admin area.
It is essentially comprised of attributes and methods that allow a degree
of control over how the data is represented, and other methods to make the
additional functionality available via various Wagtail hooks.
"""
model = None
menu_label = None
menu_icon = None
menu_order = None
list_display = ('__str__',)
list_display_add_buttons = None
inspect_view_fields = None
inspect_view_fields_exclude = []
inspect_view_enabled = False
empty_value_display = '-'
list_filter = ()
list_select_related = False
list_per_page = 100
search_fields = None
ordering = None
parent = None
index_view_class = IndexView
create_view_class = CreateView
inspect_view_class = InspectView
edit_view_class = EditView
confirm_delete_view_class = ConfirmDeleteView
choose_parent_view_class = ChooseParentView
copy_view_class = CopyRedirectView
unpublish_view_class = UnpublishRedirectView
index_template_name = ''
create_template_name = ''
edit_template_name = ''
inspect_template_name = ''
confirm_delete_template_name = ''
choose_parent_template_name = ''
permission_helper_class = None
button_helper_class = None
index_view_extra_css = []
index_view_extra_js = []
inspect_view_extra_css = []
inspect_view_extra_js = []
form_view_extra_css = []
form_view_extra_js = []
def __init__(self, parent=None):
"""
Don't allow initialisation unless self.model is set to a valid model
"""
if not self.model or not issubclass(self.model, Model):
raise ImproperlyConfigured(
u"The model attribute on your '%s' class must be set, and "
"must be a valid Django model." % self.__class__.__name__)
self.opts = self.model._meta
self.is_pagemodel = issubclass(self.model, Page)
self.parent = parent
permission_helper_class = self.get_permission_helper_class()
self.permission_helper = permission_helper_class(self.model)
def get_permission_helper_class(self):
if self.permission_helper_class:
return self.permission_helper_class
if self.is_pagemodel:
return PagePermissionHelper
return PermissionHelper
def get_button_helper_class(self):
if self.button_helper_class:
return self.button_helper_class
if self.is_pagemodel:
return PageButtonHelper
return ButtonHelper
def get_menu_label(self):
"""
Returns the label text to be used for the menu item
"""
return self.menu_label or self.opts.verbose_name_plural.title()
def get_menu_icon(self):
"""
Returns the icon to be used for the menu item. The value is prepended
with 'icon-' to create the full icon class name. For design
consistency, the same icon is also applied to the main heading for
views called by this class
"""
if self.menu_icon:
return self.menu_icon
if self.is_pagemodel:
return 'doc-full-inverse'
return 'snippet'
def get_menu_order(self):
"""
        Returns the 'order' to be applied to the menu item, with 000 being
        first place. Where ModelAdminGroup is used, the menu_order value
        should be applied to that group, and any ModelAdmin classes added to
        its 'items' attribute will be ordered automatically, based on their
        position in that sequence.
"""
return self.menu_order or 999
def show_menu_item(self, request):
"""
Returns a boolean indicating whether the menu item should be visible
for the user in the supplied request, based on their permissions.
"""
return self.permission_helper.has_list_permission(request.user)
def get_list_display(self, request):
"""
Return a sequence containing the fields/method output to be displayed
in the list view.
"""
return self.list_display
def get_list_display_add_buttons(self, request):
"""
Return the name of the field/method from list_display where action
buttons should be added.
"""
return self.list_display_add_buttons or self.list_display[0]
def get_empty_value_display(self):
"""
Return the empty_value_display set on ModelAdmin.
"""
return mark_safe(self.empty_value_display)
def get_list_filter(self, request):
"""
Returns a sequence containing the fields to be displayed as filters in
the right sidebar in the list view.
"""
return self.list_filter
def get_ordering(self, request):
"""
Returns a sequence defining the default ordering for results in the
list view.
"""
return self.ordering or ()
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site.
"""
qs = self.model._default_manager.get_queryset()
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
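    # Illustrative sketch only (not part of the original module): get_queryset
    # can be overridden in a subclass to restrict or annotate the listing; the
    # 'is_archived' field below is a hypothetical placeholder.
    #
    #   def get_queryset(self, request):
    #       qs = super(BookAdmin, self).get_queryset(request)
    #       return qs.filter(is_archived=False)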
def get_search_fields(self, request):
"""
Returns a sequence defining which fields on a model should be searched
when a search is initiated from the list view.
"""
return self.search_fields or ()
def get_index_url(self):
return reverse(get_url_name(self.opts))
def get_choose_parent_url(self):
return reverse(get_url_name(self.opts, 'choose_parent'))
def get_create_url(self):
return reverse(get_url_name(self.opts, 'create'))
def get_inspect_view_fields(self):
if not self.inspect_view_fields:
found_fields = []
for f in self.model._meta.get_fields():
if f.name not in self.inspect_view_fields_exclude:
if f.concrete and (
not f.is_relation or
(not f.auto_created and f.related_model)
):
found_fields.append(f.name)
return found_fields
return self.inspect_view_fields
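    # Illustrative sketch only (not part of the original module): the inspect
    # view is opt-in. A subclass might enable it and narrow the fields shown;
    # the field names below are hypothetical placeholders.
    #
    #   class BookAdmin(ModelAdmin):
    #       model = Book
    #       inspect_view_enabled = True
    #       inspect_view_fields = ('title', 'author', 'published_date')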
def get_extra_class_names_for_field_col(self, obj, field_name):
"""
Return a list of additional CSS class names to be added to the table
cell's `class` attribute when rendering the output of `field_name` for
`obj` in `index_view`.
Must always return a list or tuple.
"""
return []
def get_extra_attrs_for_field_col(self, obj, field_name):
"""
Return a dictionary of additional HTML attributes to be added to a
table cell when rendering the output of `field_name` for `obj` in
`index_view`.
Must always return a dictionary.
"""
return {}
def get_index_view_extra_css(self):
css = ['wagtailmodeladmin/css/index.css']
css.extend(self.index_view_extra_css)
return css
def get_index_view_extra_js(self):
return self.index_view_extra_js
def get_form_view_extra_css(self):
return self.form_view_extra_css
def get_form_view_extra_js(self):
return self.form_view_extra_js
def get_inspect_view_extra_css(self):
return self.inspect_view_extra_css
def get_inspect_view_extra_js(self):
return self.inspect_view_extra_js
def index_view(self, request):
"""
Instantiates a class-based view to provide listing functionality for
the assigned model. The view class used can be overridden by changing
the 'index_view_class' attribute.
"""
kwargs = {'model_admin': self}
view_class = self.index_view_class
return view_class.as_view(**kwargs)(request)
def create_view(self, request):
"""
Instantiates a class-based view to provide 'creation' functionality for
the assigned model, or redirect to Wagtail's create view if the
assigned model extends 'Page'. The view class used can be overridden by
changing the 'create_view_class' attribute.
"""
kwargs = {'model_admin': self}
view_class = self.create_view_class
return view_class.as_view(**kwargs)(request)
def inspect_view(self, request, object_id):
kwargs = {'model_admin': self, 'object_id': object_id}
view_class = self.inspect_view_class
return view_class.as_view(**kwargs)(request)
def choose_parent_view(self, request):
"""
Instantiates a class-based view to provide a view that allows a parent
page to be chosen for a new object, where the assigned model extends
Wagtail's Page model, and there is more than one potential parent for
new instances. The view class used can be overridden by changing the
'choose_parent_view_class' attribute.
"""
kwargs = {'model_admin': self}
view_class = self.choose_parent_view_class
return view_class.as_view(**kwargs)(request)
def edit_view(self, request, object_id):
"""
Instantiates a class-based view to provide 'edit' functionality for the
assigned model, or redirect to Wagtail's edit view if the assigned
model extends 'Page'. The view class used can be overridden by changing
the 'edit_view_class' attribute.
"""
kwargs = {'model_admin': self, 'object_id': object_id}
view_class = self.edit_view_class
return view_class.as_view(**kwargs)(request)
def confirm_delete_view(self, request, object_id):
"""
Instantiates a class-based view to provide 'delete confirmation'
functionality for the assigned model, or redirect to Wagtail's delete
confirmation view if the assigned model extends 'Page'. The view class
used can be overridden by changing the 'confirm_delete_view_class'
attribute.
"""
kwargs = {'model_admin': self, 'object_id': object_id}
view_class = self.confirm_delete_view_class
return view_class.as_view(**kwargs)(request)
def unpublish_view(self, request, object_id):
"""
Instantiates a class-based view that redirects to Wagtail's 'unpublish'
view for models that extend 'Page' (if the user has sufficient
permissions). We do this via our own view so that we can reliably
control redirection of the user back to the index_view once the action
is completed. The view class used can be overridden by changing the
'unpublish_view_class' attribute.
"""
kwargs = {'model_admin': self, 'object_id': object_id}
view_class = self.unpublish_view_class
return view_class.as_view(**kwargs)(request)
def copy_view(self, request, object_id):
"""
Instantiates a class-based view that redirects to Wagtail's 'copy'
view for models that extend 'Page' (if the user has sufficient
permissions). We do this via our own view so that we can reliably
control redirection of the user back to the index_view once the action
is completed. The view class used can be overridden by changing the
'copy_view_class' attribute.
"""
kwargs = {'model_admin': self, 'object_id': object_id}
view_class = self.copy_view_class
return view_class.as_view(**kwargs)(request)
def get_templates(self, action='index'):
"""
Utility function that provides a list of templates to try for a given
view, when the template isn't overridden by one of the template
attributes on the class.
"""
app = self.opts.app_label
model_name = self.opts.model_name
return [
'wagtailmodeladmin/%s/%s/%s.html' % (app, model_name, action),
'wagtailmodeladmin/%s/%s.html' % (app, action),
'wagtailmodeladmin/%s.html' % (action,),
]
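    # Illustrative sketch only (not part of the original module): for a
    # hypothetical 'books.Book' model, get_templates('index') would return:
    #
    #   ['wagtailmodeladmin/books/book/index.html',
    #    'wagtailmodeladmin/books/index.html',
    #    'wagtailmodeladmin/index.html']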
def get_index_template(self):
"""
Returns a template to be used when rendering 'index_view'. If a
template is specified by the 'index_template_name' attribute, that will
        be used. Otherwise, a list of preferred template names is returned,
allowing custom templates to be used by simply putting them in a
sensible location in an app's template directory.
"""
return self.index_template_name or self.get_templates('index')
def get_inspect_template(self):
"""
Returns a template to be used when rendering 'inspect_view'. If a
template is specified by the 'inspect_template_name' attribute, that
        will be used. Otherwise, a list of preferred template names is
        returned.
"""
return self.inspect_template_name or self.get_templates('inspect')
def get_choose_parent_template(self):
"""
Returns a template to be used when rendering 'choose_parent_view'. If a
template is specified by the 'choose_parent_template_name' attribute,
        that will be used. Otherwise, a list of preferred template names is
        returned.
"""
return self.choose_parent_template_name or self.get_templates(
'choose_parent')
def get_create_template(self):
"""
Returns a template to be used when rendering 'create_view'. If a
template is specified by the 'create_template_name' attribute,
        that will be used. Otherwise, a list of preferred template names is
        returned.
"""
return self.create_template_name or self.get_templates('create')
def get_edit_template(self):
"""
Returns a template to be used when rendering 'edit_view'. If a template
is specified by the 'edit_template_name' attribute, that will be used.
        Otherwise, a list of preferred template names is returned.
"""
return self.edit_template_name or self.get_templates('edit')
def get_confirm_delete_template(self):
"""
Returns a template to be used when rendering 'confirm_delete_view'. If
a template is specified by the 'confirm_delete_template_name'
        attribute, that will be used. Otherwise, a list of preferred template
        names is returned.
"""
return self.confirm_delete_template_name or self.get_templates(
'confirm_delete')
def get_menu_item(self, order=None):
"""
        Utilised by Wagtail's 'register_menu_item' hook to create a menu item
        to access the listing view; it can also be called by ModelAdminGroup
        to create a SubMenu.
"""
return ModelAdminMenuItem(self, order or self.get_menu_order())
def get_permissions_for_registration(self):
"""
Utilised by Wagtail's 'register_permissions' hook to allow permissions
for a model to be assigned to groups in settings. This is only required
if the model isn't a Page model, and isn't registered as a Snippet
"""
from wagtail.wagtailsnippets.models import SNIPPET_MODELS
if not self.is_pagemodel and self.model not in SNIPPET_MODELS:
return self.permission_helper.get_all_model_permissions()
return Permission.objects.none()
def get_admin_urls_for_registration(self):
"""
        Utilised by Wagtail's 'register_admin_urls' hook to register URLs for
        the views that this class offers.
"""
urls = (
url(get_url_pattern(self.opts),
self.index_view, name=get_url_name(self.opts)),
url(get_url_pattern(self.opts, 'create'),
self.create_view, name=get_url_name(self.opts, 'create')),
url(get_object_specific_url_pattern(self.opts, 'edit'),
self.edit_view, name=get_url_name(self.opts, 'edit')),
url(get_object_specific_url_pattern(self.opts, 'confirm_delete'),
self.confirm_delete_view,
name=get_url_name(self.opts, 'confirm_delete')),
)
if self.inspect_view_enabled:
urls = urls + (
url(get_object_specific_url_pattern(self.opts, 'inspect'),
self.inspect_view,
name=get_url_name(self.opts, 'inspect')),
)
if self.is_pagemodel:
urls = urls + (
url(get_url_pattern(self.opts, 'choose_parent'),
self.choose_parent_view,
name=get_url_name(self.opts, 'choose_parent')),
url(get_object_specific_url_pattern(self.opts, 'unpublish'),
self.unpublish_view,
name=get_url_name(self.opts, 'unpublish')),
url(get_object_specific_url_pattern(self.opts, 'copy'),
self.copy_view,
name=get_url_name(self.opts, 'copy')),
)
return urls
def construct_main_menu(self, request, menu_items):
warnings.warn((
"The 'construct_main_menu' method is now deprecated. You "
"should also remove the construct_main_menu hook from "
"wagtail_hooks.py in your app folder."), DeprecationWarning)
return menu_items
class ModelAdminGroup(WagtailRegisterable):
"""
Acts as a container for grouping together multiple PageModelAdmin and
SnippetModelAdmin instances. Creates a menu item with a SubMenu for
accessing the listing pages of those instances
"""
items = ()
menu_label = None
menu_order = None
menu_icon = None
def __init__(self):
"""
When initialising, instantiate the classes within 'items', and assign
the instances to a 'modeladmin_instances' attribute for convenient
access later
"""
self.modeladmin_instances = []
for ModelAdminClass in self.items:
self.modeladmin_instances.append(ModelAdminClass(parent=self))
def get_menu_label(self):
return self.menu_label or self.get_app_label_from_subitems()
def get_app_label_from_subitems(self):
for instance in self.modeladmin_instances:
return instance.opts.app_label.title()
return ''
def get_menu_icon(self):
return self.menu_icon or 'icon-folder-open-inverse'
def get_menu_order(self):
return self.menu_order or 999
def get_submenu_items(self):
menu_items = []
item_order = 1
for modeladmin in self.modeladmin_instances:
menu_items.append(modeladmin.get_menu_item(order=item_order))
item_order += 1
return menu_items
def get_menu_item(self):
"""
        Utilised by Wagtail's 'register_menu_item' hook to create a menu item
        for this group, with a SubMenu linking to the listing pages of any
        associated ModelAdmin instances.
"""
if self.modeladmin_instances:
submenu = SubMenu(self.get_submenu_items())
return GroupMenuItem(self, self.get_menu_order(), submenu)
def get_permissions_for_registration(self):
"""
Utilised by Wagtail's 'register_permissions' hook to allow permissions
        for all models grouped by this class to be assigned to Groups in
settings.
"""
qs = Permission.objects.none()
for instance in self.modeladmin_instances:
qs = qs | instance.get_permissions_for_registration()
return qs
def get_admin_urls_for_registration(self):
"""
        Utilised by Wagtail's 'register_admin_urls' hook to register the URLs
        used by any associated ModelAdmin instances.
"""
urls = []
for instance in self.modeladmin_instances:
urls.extend(instance.get_admin_urls_for_registration())
return urls
def construct_main_menu(self, request, menu_items):
warnings.warn((
"The 'construct_main_menu' method is now deprecated. You should "
"also remove the construct_main_menu hook from wagtail_hooks.py "
"in your app folder."), DeprecationWarning)
return menu_items
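# Illustrative sketch only (not part of the original module): a group bundles
# several ModelAdmin classes under a single menu item with a SubMenu. The
# 'BookAdmin' and 'AuthorAdmin' classes are hypothetical placeholders.
#
#   class LibraryGroup(ModelAdminGroup):
#       menu_label = 'Library'
#       menu_icon = 'folder-open-inverse'
#       menu_order = 200
#       items = (BookAdmin, AuthorAdmin)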
class PageModelAdmin(ModelAdmin):
def __init__(self, parent=None):
warnings.warn((
"The 'PageModelAdmin' class is now deprecated. You should extend "
"the 'ModelAdmin' class instead (which supports all model types)."
), DeprecationWarning)
super(PageModelAdmin, self).__init__(parent)
class SnippetModelAdmin(ModelAdmin):
def __init__(self, parent=None):
warnings.warn((
"The 'SnippetModelAdmin' class is now deprecated. You should "
"extend the 'ModelAdmin' class instead (which supports all model "
"types)."), DeprecationWarning)
super(SnippetModelAdmin, self).__init__(parent)
class AppModelAdmin(ModelAdminGroup):
pagemodeladmins = ()
snippetmodeladmins = ()
def __init__(self):
warnings.warn((
"The 'AppModelAdmin' class is now deprecated, along with the "
"pagemodeladmins and snippetmodeladmins attributes. You should "
"use 'ModelAdminGroup' class instead, and combine the contents "
"of pagemodeladmins and snippetmodeladmins into a single 'items' "
"attribute."), DeprecationWarning)
self.items = self.pagemodeladmins + self.snippetmodeladmins
super(AppModelAdmin, self).__init__()
def wagtailmodeladmin_register(wagtailmodeladmin_class):
"""
Alternative one-line method for registering ModelAdmin or ModelAdminGroup
classes with Wagtail.
"""
instance = wagtailmodeladmin_class()
instance.register_with_wagtail()
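# Illustrative sketch only (not part of the original module): typical usage
# from a wagtail_hooks.py file in the host app. 'BookAdmin' is a hypothetical
# placeholder, and the import path is assumed - adjust it to wherever
# ModelAdmin and wagtailmodeladmin_register are exposed in your install.
#
#   from wagtailmodeladmin.options import (
#       ModelAdmin, wagtailmodeladmin_register)
#
#   class BookAdmin(ModelAdmin):
#       model = Book
#       list_display = ('title', 'author')
#
#   wagtailmodeladmin_register(BookAdmin)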
| 38.259878 | 79 | 0.659901 |
96f546ec43f13e7f34658fd1113e09e62f6cb141 | 188,933 | py | Python | tests/functional/preview_and_dev/consts.py | alphagov/notify-functional-tests | 5d15be45500f381629c32dba7650dd77c9f58a2e | ["MIT"] | 3 | 2017-03-01T18:17:36.000Z | 2019-05-15T12:32:05.000Z | tests/functional/preview_and_dev/consts.py | alphagov/notify-functional-tests | 5d15be45500f381629c32dba7650dd77c9f58a2e | ["MIT"] | 110 | 2016-03-09T16:42:24.000Z | 2021-11-22T16:51:21.000Z | tests/functional/preview_and_dev/consts.py | alphagov/notify-functional-tests | 5d15be45500f381629c32dba7650dd77c9f58a2e | ["MIT"] | 4 | 2017-11-21T17:14:56.000Z | 2021-04-10T19:11:26.000Z |
# These string constants are a copy of the base64 constants in
# https://github.com/alphagov/notifications-utils/blob/master/tests/pdf_consts.py
# The source pdfs are located here - https://github.com/alphagov/notifications-utils/tree/master/tests/test_files
# cat test_files/one_page_pdf.pdf | openssl base64 -A
one_page_pdf = "JVBERi0xLjUKJbXtrvsKMyAwIG9iago8PCAvTGVuZ3RoIDQgMCBSCiAgIC9GaWx0ZXIgL0ZsYXRlRGVjb2RlCj4+CnN0cmVhbQp4nK1UTW/bMAy961fwaB+skPqgpGuB7jBgQIsZ2GEYhrR1sh6aLkl36L8fJS9xNqddNxsBnkiKoR8fbW4VQv7t1rBYIqz36qJVZLRHNM6Vq8FzYlHylsBbp7E32we1WDXYSCK0K/W5MnVDWFEtAHWq3hW3K3hTcFfwhyBVy5PIc/8XAVOrHMCSQuUy9pdf2vfKoMaB3uAN9FyymsmaeKBHA70PdcP5saG6z9amZinL1bfsdDm8zNbdSYLAumTlh/9dG8dRhxCsBEfaXJReHgvele6eh74uW3UNW5UrGYhSmuWkFLUzDI0njcbCroNPsFFbWFwtn5663QZu97D4HmB/uzmMMDPzyUN0lPNX6roUPjNpLiEGTjpYdJwgIGufbPBj8jIOQ7/h11HktTj2tvqHQq9mvr0QHm3zh9ilcWnagSFtAjThoPAZteLpe3d0Bu04BZmVD/HMV9HKtLm6LPixYDvmwTK0OXjEqNMkHjHMwwMn6hHiLDwCTdQj4Cw82E7Ug2kWHt5N1MPmQ2bjUCfnoTFGJ6aXllN8w3KSdefyl2hZO5S1J29wMtD8Ol8onP5v6yWpmYLvl9/R8eR1RMyqWKMJybMZ63O1XHdiP66Ajrqon03Mha4KZW5kc3RyZWFtCmVuZG9iago0IDAgb2JqCiAgIDQ3MQplbmRvYmoKMiAwIG9iago8PAogICAvRXh0R1N0YXRlIDw8CiAgICAgIC9hMCA8PCAvQ0EgMSAvY2EgMSA+PgogICA+PgogICAvUGF0dGVybiA8PCAvcDcgNyAwIFIgL3A4IDggMCBSIC9wOSA5IDAgUiA+PgogICAvRm9udCA8PAogICAgICAvZi0wLTAgNSAwIFIKICAgICAgL2YtMS0wIDYgMCBSCiAgID4+Cj4+CmVuZG9iagoxMCAwIG9iago8PCAvVHlwZSAvUGFnZQogICAvUGFyZW50IDEgMCBSCiAgIC9NZWRpYUJveCBbIDAgMCA1OTUgODQxIF0KICAgL0NvbnRlbnRzIDMgMCBSCiAgIC9Hcm91cCA8PAogICAgICAvVHlwZSAvR3JvdXAKICAgICAgL1MgL1RyYW5zcGFyZW5jeQogICAgICAvSSB0cnVlCiAgICAgIC9DUyAvRGV2aWNlUkdCCiAgID4+CiAgIC9SZXNvdXJjZXMgMiAwIFIKPj4KZW5kb2JqCjcgMCBvYmoKPDwgL0xlbmd0aCAxMiAwIFIKICAgL1BhdHRlcm5UeXBlIDEKICAgL0JCb3ggWyAwIDY4IDIwNiAxMzcgXQogICAvWFN0ZXAgNTMwCiAgIC9ZU3RlcCAxMzcKICAgL1RpbGluZ1R5cGUgMQogICAvUGFpbnRUeXBlIDEKICAgL01hdHJpeCBbIDAuNzUgMCAwIDAuNzUgNDIuNTE5Njg1IDcwOS45MDM1NDMgXQogICAvUmVzb3VyY2VzIDw8IC9YT2JqZWN0IDw8IC94MTEgMTEgMCBSID4+ID4+Cj4+CnN0cmVhbQogL3gxMSBEbwogCgplbmRzdHJlYW0KZW5kb2JqCjEyIDAgb2JqCiAgIDExCmVuZG9iago4IDAgb2JqCjw8IC9MZW5ndGggMTQgMCBSCiAgIC9QYXR0ZXJuVHlwZSAxCiAgIC9CQm94IFsgMCAyOCAxMDIgNjIgXQogICAvWFN0ZXAgNjQzCiAgIC9ZU3RlcCA2MgogICAvVGlsaW5nVHlwZSAxCiAgIC9QYWludFR5cGUgMQogICAvTWF0cml4IFsgMC43NSAwIDAgMC43NSA2OS43MzIyODMgNTkzLjI0MDE1NyBdCiAgIC9SZXNvdXJjZXMgPDwgL1hPYmplY3QgPDwgL3gxMyAxMyAwIFIgPj4gPj4KPj4Kc3RyZWFtCiAveDEzIERvCiAKCmVuZHN0cmVhbQplbmRvYmoKMTQgMCBvYmoKICAgMTEKZW5kb2JqCjkgMCBvYmoKPDwgL0xlbmd0aCAxNiAwIFIKICAgL1BhdHRlcm5UeXBlIDEKICAgL0JCb3ggWyAwIDAgMjYgMjYgXQogICAvWFN0ZXAgMjYKICAgL1lTdGVwIDI2CiAgIC9UaWxpbmdUeXBlIDEKICAgL1BhaW50VHlwZSAxCiAgIC9NYXRyaXggWyAwLjc1IDAgMCAwLjc1IDEyLjQ3MjQ0MSA3MTYuOTAxNTc1IF0KICAgL1Jlc291cmNlcyA8PCAvWE9iamVjdCA8PCAveDE1IDE1IDAgUiA+PiA+Pgo+PgpzdHJlYW0KIC94MTUgRG8KIAoKZW5kc3RyZWFtCmVuZG9iagoxNiAwIG9iagogICAxMQplbmRvYmoKMTEgMCBvYmoKPDwgL0xlbmd0aCAxOCAwIFIKICAgL0ZpbHRlciAvRmxhdGVEZWNvZGUKICAgL1R5cGUgL1hPYmplY3QKICAgL1N1YnR5cGUgL0Zvcm0KICAgL0JCb3ggWyAwIDY4IDIwNiAxMzcgXQogICAvUmVzb3VyY2VzIDE3IDAgUgo+PgpzdHJlYW0KeJwlyTsKgDAURNH+rWI2YDIvP5PeDaSyFhErlWjj8hXCaS7cJhVNCPUjHKNRpxhSNvSKe8OMU5r0QeYC/voOJfUuKUcGrIfYhdgf2FcLpkvq7wO8OxQaCmVuZHN0cmVhbQplbmRvYmoKMTggMCBvYmoKICAgODcKZW5kb2JqCjE3IDAgb2JqCjw8CiAgIC9FeHRHU3RhdGUgPDwKICAgICAgL2EwIDw8IC9DQSAxIC9jYSAxID4+CiAgID4+CiAgIC9YT2JqZWN0IDw8IC94MTkgMTkgMCBSID4+Cj4+CmVuZG9iagoxMyAwIG9iago8PCAvTGVuZ3RoIDIxIDAgUgogICAvRmlsdGVyIC9GbGF0ZURlY29kZQogICAvVHlwZSAvWE9iamVjdAogICAvU3VidHlwZSAvRm9ybQogICAvQkJveCBbIDAgMjggMTAyIDYyIF0KICAgL1Jlc291cmNlcyAyMCAwIFIKPj4Kc3RyZWFtCnicJckxDoAgDEbhvaf4LyC0BarsXoDJ2RjjpAZdPL5E8y0veZUKKjFMISzOLKILwVlvuFZMOKjSP1QM3Hw3pSytdXBBY+SMZSc/M7Yb/lHFeFJpXpfTE68KZW5kc3RyZWFtCmVuZG9iagoyMSAwIG9iagogICA4OAplbmRvYmoKMjAgMCBvYmoKPDwKICAgL0V4dEdTdGF0ZSA8PAogICAgICAvYTAgPDwgL0NBIDEgL2NhIDEgPj4KICAgPj4KICAgL1hPYmplY3QgPDwgL3gyMiAyMiAwIFIgPj4KPj4KZW5kb2JqCjE1IDAgb2JqCjw8IC9MZW5ndGggMjQgMCBSCiAgIC9GaWx0ZXIgL0ZsYXRlRGVjb2RlCiAgIC9UeXBlIC9YT2JqZWN0CiA
gIC9TdWJ0eXBlIC9Gb3JtCiAgIC9CQm94IFsgMCAwIDI2IDI2IF0KICAgL1Jlc291cmNlcyAyMyAwIFIKPj4Kc3RyZWFtCnicK+QKVCjkMlAwMlMwMtUzNjJR0IXSRakK4Qp5XIVcYL6RhbGpggEQovD0zMzNDc1MFZJzufQTDRTSixX0K4xMFVzyuQKBEABw3xNFCmVuZHN0cmVhbQplbmRvYmoKMjQgMCBvYmoKICAgNzcKZW5kb2JqCjIzIDAgb2JqCjw8CiAgIC9FeHRHU3RhdGUgPDwKICAgICAgL2EwIDw8IC9DQSAxIC9jYSAxID4+CiAgID4+CiAgIC9YT2JqZWN0IDw8IC94MjUgMjUgMCBSID4+Cj4+CmVuZG9iagoxOSAwIG9iago8PCAvTGVuZ3RoIDI2IDAgUgogICAvRmlsdGVyIC9GbGF0ZURlY29kZQogICAvVHlwZSAvWE9iamVjdAogICAvU3VidHlwZSAvSW1hZ2UKICAgL1dpZHRoIDEyMDAKICAgL0hlaWdodCAzOTgKICAgL0NvbG9yU3BhY2UgL0RldmljZVJHQgogICAvSW50ZXJwb2xhdGUgdHJ1ZQogICAvQml0c1BlckNvbXBvbmVudCA4Cj4+CnN0cmVhbQp4nOyddVwVzffH99LdrYgoSggWFhJ2YGCCIga2kiq22J0oKioKil2YWGArdiAGiigYoISg0iH8Zmfurhe4XC79/T2e9+vyPLuzM7Nndu8f9+M5c46u2wVdgZ96riGKDseP3f9SBABlEx7+qEWLNt269evWbTD+DGrRosXevYfr2i4AAAAAAAAAAMpEzClY8EdkxEmq2769t+Lq2lLgf5rMzKxmzSwoHjgc+S9f4uvaLgAAAAAAAAAAykR+7BnBH1nn01SfgwfufKprS4H/RfLzC1JSfpDjgoI/8vLaFCVKUdIUJRITw/3OZGXlJCUl152NAAAAAAAAAADwB/QgUBX+/PnTs+fQdeu2ktNduw4Q5+C4cdNJy/37z1q0sIqP/153NgIAAAAAAAAAwB/Qg0AV6dSpPxKAK1euJafKynrolBzv3bufokQoSq7urAMAAAAAAAAAoExADwJVZMwYV+ITNDQ0X79+q5FRRzExpVWrfNq37860d6hrG4uRkvIjISGxrq0AAAAAAAAAgLoH9CBQRUaMmIRlnxRRf7KyDcTFdXkTyzRubF7XNhbj1asoZNW2bQF1bQgAAAAAAAAA1DGgB4EqwvoH3dxmIKllYmItJibz4sXr1as3SEqqoPZGjf639CDC1nYIMiwwEMphAAAAAAAAAP80oAeBKtK8uY2oqMy9ew/JKU4uSn38+Bkdf/781cTEnKJE69RA/hgatkJ27tixr64NAQAAAAAAAIA6A/QgUBXy8/Nnzlz85MlLcurs7EF8ha1adSYteXmFrq4zkTCsOxv5k5SUSlESyFQ/P5CEAAAAAAAAwD8K6MEaIjExOSHhv19koaCggD329d3Nu21w0qTp7KXMzKy6sK4crl0LJ6bCXkIAAAAAAADg3wT0YLUTHf3B1tZORkZu27Y9fDsEBh4ZNGhIaOgNchofn1B7xtUYERGvzM0tbWy6GBtbGRl1tLTsamra+vz5y3VtVzmsWuVLJOHGjTvq2hYAAAAAAAAAqG1AD1Y7cXGfKUoGSYwbN+59+ZIwZcp03qtILTIONOnU1LQZMxaKioo5Ok7Ky8uvK4Ori+TklKioj+T48+dvCQnf6tYeIRkwYBR5Hz4+O+vaFgAAAAAAAACoVUAP1hBNmrTo3r0PERqRka+L6A1rKfn5hRkZmRQlixrNzbt9+RLPRlfGx///UE9l4ee3u0ED/c6d7cjpkiVr1dQ0V6xYU7dWCUmjRi3IW1i1yqeubQEAAAAAAACA2gP0YLXj4TFvwoTx4uLKSF+MG+fRtWvXkJDQBw+eSErK6OkZff78NTIyas2adT9//irCxftERESdnV3r2uoq4eQ0kegpW1sH0rJ69WbSYmHR9X/f9RkXF0/SoiKWLdtQ1+YAAAAAAAAAQC0BerDa6dNnGJIVXbv2adiw9b59R0hjixadiNzw9l5Zov+XL3Tuzfnzl7q4ePz69bu2za0yCxeuwyuTR399+w4njYwepEVxz54OdWuhMJw7d5X11S5btr6uzQEAAAAAAACA2gD0YLXz61d6nz5D0EH79j2RuFizZjM63r//GEWJqKjoPH/+svSQzZt3ESXi6Dixts2tGl+/JmDDORSlwusfXLXKB7crka2UV6/eqls7hcHLawm2WRL9LViwoq7NAQAAAAAAAIAaB/Rg9RIfn/To0VNy3Lp1F6LyJkyYhk4/ffry82cG31E+PjtJz4kTp9WerdXBlCkzeWtMtGzZibTPmrWYt71p0/Z1aqawWFj0wPbSGzznzl1a1+YAAAAAAAAAQM0CerCKxMTEtm5t0aKFeULC97S0n7KyakhK7NlzAF0yMbFgfGcU6sY76vnzlyNHjs/JySGnhYWFe/ce3L591+/f6bW/hKpw8WLYnj0BAQGHAwIOoQO2iMaLF69w+0HSfvDgiTo1U1jS07MUFbXZ8Nc5c5bUtUUAAAAAAAAAUIOAHqwizs5u7Kazx4+fkeN69cyK6EIGTox/TPT79yTeUevWbUWtDg7j2JaTJ0MmThwfEhJa2wsAivPwYQQTNUpLwmnT5tW1RQAAAAAAAABQU4AerCL37z8mku/48TPZ2Tmmpu3k5BT27j2MLiUkfO/Vy65du3bBwSElRnl7r0JDxMQ0yOnr128nTZpV5/vs/PwCR4wYvm1bANvy9ev3588jBAxJTU3LyMhCC88ukxzU4cePNAGTvHz5+t27j+zpkSNnHB2HL15cMvFOrbFt215m86Mc+p+7+5y6sgQAAAAAAAAAahTQg5UmNvbzpk0+KSmpt249CAm5wranpf0so/+nzMwscvz1a4KYmLSqalNyOn36Am3tBmpqhj9/1nZ+0fz8AnKwd+8hdrsfiXc9evSUhAQtiPr1G56Vlc13+IIFK2RlZZDlampNyvgYycnJTpu2gO/wgoKCqVNnkZtu2ECXgw8Lu8ma4enJ9c0VFqKehdW/+LJxcpqMTVAnktDVdVZt3h0AAAAAAAAAagfQg5Xj9+90RUV9OqxQUrPcznfu3Ovff1Dz5jZsS25ubo8eg2RktA0Njdev30ryybRr1xO116TVxQgOvqCn10hbW2f16q3odMgQZ6yAtNBf7950mlCKUmXzbfr7B6GWkJCwOXNmnT37V/w+fvycEoK7dx+wQ27ffoQmOXToBH4yD/B1EdLtz58/ixatYc1AT7iIrgRxWVdXr169+vv31+omRGPjttgSTSIJ3dxAEgIAAAAAAAD/NUAPVo6kpGRW7ERHfyCN2dnZCQmJvN0yM7MGDhxBul26dCMrK3fhwjVFf6UWJSOj4OQ05erVW6dPX+rceXBKyo/asR/XiRBjl/Dz568zZy6ypwEBB7GREvhMEf1duXL97dtotsPRo6fZqRYsIApOgaLU8LrYjxoOuaQmTJjBdg4Pf8hOcvr0hd
jYT+QxEEmYn1/w7Fkk22HOnGXYDFm25cOHuNp5PkV0uG+SqKgcI07pgylTptfa3QEAAAAAAACgFgA9WGn8/YNGjRp1+fJNtiU6OkZERHLsWFfebn/+FK1f7yMrq+Lj40tSlKSkpE6ZMjMwcJ+EhHxw8GXSLTiYlmN5eXm1Y/zTpy8Y3x8t2Ugem/nzl1lZWc2Zs5j0WbZsA1FhZmYd0Onw4RNZz52JiQXp8/Ejve/P3Z2tOqHMiEEVcu7gMAZ1iIuLJf0HDhzFTmJoSBehsLMjLdT48Vy15eOz08rKcuJE+jEyulucqMJbt+7VzvMhXLp0E99dCnsJ5bG2da9NAwAAAAAAAACgRgE9WBXi41Oio6PZ08TEJCJt1q71JS0hIVe2b996+PBJik46auDk5Kyu3oAtvtC+fX8Li87o4Nu376qq9ShKIiMjs6ZtDgkJCQ9/jA769+emP9XXb/7tWyLfznfuPED9yfHKlZtYP92SJWtRS58+QymK07SpSVTUB3//AC2tBhQPqqo6SAjn5OTZ2HSnKFFLy+5//vwJCjrKdiBRqUV00YpLYWE3+BqQnZ3N2tmly0DUEhn5NiTkfHU/lTJZuXIz4yTVIJJw1KhJtXZ3AAAAAAAAAKhRQA9WGuxiE0cCYcGCv5kwBw1yRi1duw5OSkp9+PAZEw9JrVq1pkUL7v5BknAmISFRW5vegair20JKqh7p9ujRs5ozODs7x8rKltxo+3Y6iai/f+C2bdvS07OEnMHdfba+vv7w4c7o+MSJs3gmUdopqNwYtWRlZZ8+fWbz5m2bN28NDj716xddS9HefizuRoeebtiwDbW4uc3S1280YIBDYmKykPfdtWvP9u10tpmTJ88R+1u0sExPz6j4M6gMAwaMJAIXp5dRQEfDh48t0aewsHD37v1hYXWZHhYAAAAAAAAAKgrowUrj5xfIhFNas415eXmurjN0dY1UVRuiS9nZ2TNnLnNzo/NkSklptWzZAYmgnz9/BQQcUFPT4/WmiYsrt27dJjr6w/HjZ0eMcJo3b97Ll2/ZacPDw1euXNe3bz8fn52VNnjr1t34Vgr4vyK5uVWKTV28eA0TIIqQL6ubhUVv3IEOH50yxasqd0QoKOgx3rq/TthaQFfXGN9XA0tC+u6DBzuV6DN9+gLU7uvrX2tWAQAAAAAAAEAVAT1YaZKTf1ha9mjduu369X737j2Ojo4h7WPHkgr1Yvfv30tJSWX7a2k1ZeSYMk9QZcPhw0fv3r03KYlbsH7GjIWsRvz2LfH69TvNmrVi+0tJ6eTn51fO4IMHT7CuOjRTWXUxhOTz568NG5oQq3bs2Pv7d3rpPl+/Jty9+1Bamt6iqKKiExUVXbpPhdDVbY5vKI3+Nm7cXsXZhOf9e+LnFcGSkJsnZ8CA4bx9zp+/Qp6Gl9eiWjMMAAAAAAAAAKoC6MEqkpr6W1xcoXlzK3f3+fb2zkgAUnQSleGSkjrZ2QVst+HDnVkhQ9FVKlRHjhx/7NiJ/PxiTrqnT5/17DkAh6Ei0cFRVzckIal455oq2b8WGnq90taOH+9JDCD1I6pIVlb+yZPHHzygY1yRCra27oyEIbl0+vSFLl26Gxi0KaKV4zfULSlJUEl6IUHqWEaGVtNduvSr+mwVIijoJH5ycvjVcCVhv372bIeAAFLAUYqi91eur2XzAAAAAAAAAKASgB6sInl5+YsWLcPb6MTr1WsUFHT0+fNXRbSbz1tSUtbLa6Gr62xtbWPGv8fp2LFrYOC+379/sTN8+BDj7x/Yv799/fqGTCU+dWa3GoVrMajzFHHg7sKrKBkZOa9fvyS3e/fuQ3Utn2X48PF0PKWGETm1s6OTwNSvb1rtN/r2LeXNm9dFtM8uOjm5GjSm8EydOpuJfSXVNGhl2qvXQHJ14sTpzLujJWFY2O3atA0AAAAAAAAAKgHowWrhwoVQJAEcHUch2UVaFixYQRVDfvz4qc+e/U0XExn5cs2a9R06dCHygUG2eAm/0h9q0aI1FTXvzZtoLa2GaGzbttbl964U48a5o/k7drQlp1On0hUojIw61NDthg2j/a3y8mq3boXX0C34Ym7emUewcyXh4MEj0KVOnfqTEGAcU0qpqOgXFhbWpm0AAAAAAAAAUFFAD1YXT5++RCrAyspqxYrNnp7zSTwhvVtPQsnLa3ZsLLeQekZGxsaNW2xsejCuQBJhqFKqmDtbxa+kHly+fGNFbevc2Y5xNVKurrOre+k0takHN2zYxkZmNmrUuiZuURY/f6bLyhK/rQYjCVWw0O6hqWnEI+dpnRgQcLA2bQMAAAAAAACAigJ6sBoJC7vN6xEUE1OaPn1WQsI3cvXt27cTJ7rIy6sz12XK0IAkUlQO91EsHixKS4+dO/dV1DCkVphAR2r0aJfqXjdNberBBQtW4uXQmktLy6QmbiGAu3cfE6HP4yVUwy3SjEjkxvq2a9ejlm0DAAAAAAAAgAoBerB68fPbR8TeuHGT4uI+k8aYmNhJk6YigcgoQVWBSlCBmcHDyKgj1h3sVSkREemvX79V1KoHD57IyNBiUFW1/tu376t70TS1qQe/fInX12+G5udwpM6cuVATtxDMpk27GLVe1ntUxVdV2rbt1qVL31GjJq1Zs+HSpUsQQQoAAAAAAAD8TwF6sNpZtGjFuXMXyXFmZvbMmXM5HBksHySLO/tKK0FJogStrbvdunXn69cEZWV17HVi+4jUr2+alPS9Elbl5ha9eBHx50+1LpWHWt4/iHj9OjItrZZK0pdmwYLVjI+yrBeqwUp7loYNjWfPnhsfX2FFDwAAAAAAAAA1AejB6iU29gt7vHv3Xg2N+lgHiJStBNloQzoEsV+/IWFhYWT42LHuPKlLuB8JCXUZGbVOnbqfOnVaeKtCQq6NGuW0atWmal4tD7WvBwMDD48ePXLTJr+au0VZPHv2dPr0OVi/K5WX/4f3w91SKi2ttHDhkuzsnNq3HAAAAAAAAAB4AT1YjWzbthdJvwcPnn7+/LVbN1tG5akLjCpUERVV1tU19PCY8fr1a97ZLCx6M8GlvOJRhY077dSp19mzF/LzCzMyMjMzs8qy6vXrt6x/qlu3QRVdlKent7OzR7nd7O3HovmNjTuS05Ejp9ApVzQMyx3o73+oX79hFbVq4cLV7KJ8ff0rOrxyxMcnbN/u1759F+HebFnyn7uBtH59I1/f3efPnzt48ATEkQIAAAAAAAB1AujB6uLduxhSO15ERF1cXBP/5FcSGCBKPmKamg35TqipacJPDyoyyUxkWUFkZ+eUmlpmJT4fnx2MeEHI/qlIzGheXh4OWKWioz8K7unuPkdeXrF3bwdy6uW1SEFBsX37noJHFRQUyMvroPnfvHknvFWINm264eVooT9LS9sKja0Eubm5Y8ZMkpBQZB65XMWVYFlOYWrq1Jk1bT8AAAAAAAAA8AX0YHWxffte/OteDatCUX5KUA2XSChRX16RwxFbvHhVidkePoxgdEexFCUqKnodO/Zp3tya8RKKWlp2efYsUoBh799/YGtb2No6VGhRy5atIwMbNWpV4
SciBGPHejKOy6EVGrh69WZWT+3YsbcmbOOlb98R+FZiPEmBKOadVk4SqoiLq+rpNZ49e3FNGw8AAAAAAAAAZQF6sOoUFha8efNm5swFWCyUTjDCLVFH3HPS0vWLVxVUJy6/wYMd37x5SyYMC7uhra2PZyshKpXk5DStrbt6ey+5f//JmzcvP34sx21HuHXroavr1F279lZ0aRoaxthsurLDhg3b+fZJS/sZGRlZUFBATn//Tkcf1gv56tWr5OQffAc+evQMTy6D3Z3inz59rZBtZ89emT7doxbEIILxyVLi4uqjRk0dO9ZdTa0xbhThqTFRoY+shobO3r0HasF4AAAAAAAAACgL0INVITQ0zMHBSVfXEDv+xIt7i8ixOHenmbre5MkeL19GubvPpShOcWmgzmQakTczszYx6ciIx9JCQ41IM8SIESOzsvIrZO2dO3dv3LgtfP+3b99jEUTSotKbAfPz+dzx+/ekwYOdaOvldeXlG8rL6+EPOmiAGrt37//5M3+hN3jwGCaQFS2N2rlzr/C2ITl8+/ZN4ftXmri4L8OHj8K6T1xFpd779zGkPSMjY/nyVXJyalVwFCrJyKi3adPx4MEjtbAQAAAAAAAAACgN6MHKERUV3afPYJ7QQWms1Igo0MA+LxzNKapkZzc0MDAoMzOTDJwzZymjIFRLqUJJ1o1YXmUK4nAUOXbsrDDWpqb+bNy4BZl6+nRvIdf45s07PEKS6DUORy0rK5tvz9zcfEvLnozxHOZDtWzZ8efP9LLmb9++JxNhS8+/fXuAkIYdOHCc3ElSUunlyzdCjqooeXkFCxcuEROTY4ykJk70KtHn06cvAwYM4nBksZO3ErsI5clCevXqm5Lys4YWAgAAAAAAAABlAXqwcowe7YJ/yCvyE25yDRoYL1my+sKFC58/fy4x0MFhEj89qM6bH4bxmgl2OWmQfv7+B8u1du/ew7gv8UKK/Pz5i/dqWcktUXv9+s0ZTyXVrl330n2+f09ij01N2zOd6f5NmrRgL6WkpJbOYzNr1hJmpaIUJSZ8Vb6mTdszAylHx4lCjqoQx4+f0tc3ZOQt14N57tzlIpxb5sKFS7Nnzx80aETPnoOaN7fhcJQrWHiixKunA4Z1dBp9+5ZcE2sBAAAAAAAAgLIAPVg5jhw5xSgsPlvDGjZs6uu7tfSogoI/8vLaOIiU9RCxWws5bdva+Pr6nzlz0dS0o3BRiBokUcyRI+V4Ca9cuc7qTHFxLWQGaY+IeBEYeCg3N6+sgWFhN/Eg2t156NCJ0h2WLl3n6TmPHH/58h0vTQp7S0Xev48j7Rs2bJ882au06vzwIQ73p72iy5f7CLA/NPRGcPDfeou9e9szSo1auLBkKp4qkpLya9gwJ+ZpqTGvQFpRURM9jQULluKtnbyI83Tj9f0pYo0vZBwprW0NDGokaQ8AAAAAAAAAlAXowUqjo9OMcYeVlmm0xpGWVjh69BTvECenCTy+P250qLa2gbv7tEePHvP2HDLEEWs9lfJ0BNdL+PjxS8HWenjMl5CQ1NVthFQYOg0PvzdhwlQ0cN06PrqVl3nzVqJuenot0fHt2w/XrFl16tRF9uqxY6fR1UmTXIjPceXKLcSeuXNXkg5r125Ep97eK9khERFR/v67Hj16UURXsZ+Ornbu3F+wDaSEorV112PHuJrUyqqrhIRUv34OWVllFl6sBBcuXFVTq4dXIFciDaycnL6oqDqjARXLE3psf9GKJJyhXF3nVeNyAAAAAAAAAEAwoAcrzaNHEYw64Jv4hasdPD3nRkS8un37/pAhIxkpQZDs12/IoUOHs7OzeeZ8tmbNuri4z8nJaVgwypQhHOR4JAktCeXk1FJTfwmwtoiO7UxMTf199uxFW1s7YoGSkj7fnmvXbtfT0zM3b3fp0g10amLS5ujRM2/evGddYleuXCc9Hz58yn0EGvpLl64LCDgkKirD4Ujt2hW0fv1WI6PW5Gpw8HnSPzo6hs2xc/fu45SUVG3txkW0V+6nvf3wtm3brljx11HI61IcPdqNjDIxaevruy0uLuH7d2HjS4VkxYoNzPpKB+uSqE4poZWdgry8tpvbHCWlBthLKMwQUo6k/FKPAAAAAAAAAFBdgB6sCmfOhIqISDAqr/RGQg1mV6AUIzToHXxNmjRfsWJNdHR06Qm1tUkNemld3ZZMNtGSwoHDUdHUbMYkGlXA96W1Z6tWVsLYLCOjwwykRo92IY1Xr969dOkqOcb1Cv9y+/Yj1Pjt23dFxfqMVqLs7JxIZ1wzQhTrUxYN1mvJLvnMmQuk/8yZi3Aj7QuTkFD9/j3px49fWVk5ysra7IBbt8JJZyQefX0DyfHly9d41TS6byXfWRm4unrhicUrWz+itLijjh49vWjRWjxtBaJGLS37VO/SAAAAAAAAAKAsQA9WkTdvYuzsBikoaArMKCLNVJanpk6dJmC2yMjXbm7TSKk7frvS0EeFw5Fr29Zq1SqfefMW8/iz6MkdHSeUa3BMTCzWp/QtNm70I426uiasNjxz5hKeU5loRg5HvoiOL33IKxKtrLiaBfsHRViXX716pmJi2qKiGvXrm5H9fUQLnzoVQvo7OIzDjaKkf0DAIdTYrJkFo/VoGbVixSbS+dq12xQl8+MHnXjz1aso1uwNG/wq8aYEMHQoqXwhJzCta4X1YFDQUfy0Kb66vizHIuq9fz+frZoAAAAAAAAAUO2AHqwWWre2ZtxeAj5Iy8jm5NDJW16+fBMby/+RrlixVlJSHlciEDCVqKGh6Z49gUgxde7cA99alngMJ092L9fazZt3Ezl29CidpOXAgZPouFu3oeRqdPQHPJsYVm20d2/r1sApU2ahg2HDJjRubI4O7O3Hkc6PHnHjRQcMcLh16+7XrwliYlIiIuKxsZ/v3Xvo6OhMrh47xs0G4+Y2h6LrROh07kzHrI4Z43rjxn0eoYmEM3X4cDDpvHr1ZoreXdi3CKfiIca0bt25Sq+qFPPnr2DUaGkxqFQRKVfiQ124EIrmR4KaqdkhpIsQiWuRixfDqneZAAAAAAAAAFAa0INVZ82abYygKEcgqKsbF9G+tkfouHdvhxLz3LoVzoRNKpXnqCKJSigHh+EPHkT4+m5j9BTtkjMwMPPx2ZaWJqieXcOGdDnCa9fuvnwZRRx2UlLqKSmp5KqamhFqOn/+SnT0Rzs7B6LUdu7chy5lZmYiYevhwU17smXLLopOcMr1Z504cZ7YERR0nLRcu3ZTVFR68uQZ5HT9+q3oakTEK3SM1Kiior6aWgNNzQZ37jy8cuUGh0M7E9PSuBsh27e3JbPNnbs0NfUnSXNa7dvrOnXqj2/ChomqMbG4LMqlgkgFF5ggW0elc3Nz0fwiItrYhSqsHuRwtMhdjY1bPnkSUb2LBQAAAAAAAABeQA9WEWa3nbgQoYaiurrNExK+Gxq2I8GEaHhi4jc2D4yKSmM8VbmVB1nZQgclSksrBAeff//+47BhI7ArSgw1jhgxPjU1TYDZd+/SmtTIqBWb5hTHYW4nVz085kpJ6bCdxcTosuns6YwZS44c
4eZNDQg4ePnyDfZSw4bNSTH6+vVN2MZHj15s2uRHksMgjTls2N+KgUggo5kfPHhOTuvVM2vWzJocf/36jVeSaWjQEnXsWM8Kvp/y8fffz4g+dbb8R4MGJrNnLz558hwSrWJiqthxSR67Bu8Tw6elM8/Qqnbnzv1ociTo8BuRE1oPSujomG7ZsnvixKlz5y788iW+2tcLAAAAAAAAACygB6vIpEkzeEScGvYEKZStDVWkpOrR8ZKS6vb241q37iwhoSovr3zlyg00lapqk+KOKmE+3BKEixatQDMkJye9fBnJ+tcEY2NDR2w2atR68OCxDg4TmjUzU1c3IPXlMzIy163b9uvXb9KzRYvuFJ3m5QE5/fPnD98S9uPGTcX2qxBVNW6cC+9VMiQ/vyA9PZO0ZGbmiokhnSVJTvPy8ry8FkVGvibdHB0nKSkpDRw42sFhfKdO/fDMsunpGRV9QeWSkvIDKz7uFsgWLdofOnSEXWBUVLS4uAxPZlF5SUnl5cs3HD4crKFB9Ls887qJMKSZPXsJGe7puRA3CJlPBn3kxMUl0U2rfZkAAAAAAAAAUBrQg1XE0LAD/sHPjRKUk2ugpGRQRuwoqT4vwriiKMaxSBkYtCnC8ZM8jie+29nKik6kk5AMGuRUIcsjI9+gURMnerAtFy5ciYmJLdEtJCRMTEwSqTY1Nd2bN+/wnerZs+d9+w7CZisx+U7pvX4DBti/evWG75CPH2Pbt7chS/X3P1DiamZm1smT5zIyuLUFDx8+hbq5uc2v0AKFBElgYoampu6OHbvZ9vj4eC+v2fiKDF6RMtG5mzZxvag/fqTZ2Q2hilO/vn5g4CHS4devDCQe8b5R4QW+OnaGGtbESgEAAAAAAACgBKAHq0JSUgoJBMU/40U6dOj2+vVbZeX6FSlUp0bUX35+PprwyJHTkydP7tKFuMME+BlLT0LrL0fHieXazIulZR8chOmSmZnJt8OWLX443FGcLSHRo8eAOXOW7tlz4MCB41u37p4xY37XrnaMc02Fx2B1ZheejJ3d8OXL1x0/fvbq1VsXLoT6+OwYOXKCuLgq41elc9f4+paZMnT9+s04BlWuJpyDRbRq+21p2c3V1ZP1q969e8/ZeYKUFHk16FVqMxVD6BWxtRQJISEXp0xxs7cfNWPGnIMHD5F8QYT27bswaxReD6qSmhoXLkA+GQAAAAAAAKDGAT1YFZ4/f4l3k0mT3/Dx8d+RuGDcZBXwB/XpM6xEBObcuUsYhSW8rqS9hEuWbBTeflw9ECsWdb1ZsxZev3773buYqKjoq1dvzpmz2MCgOb4owoSwqjPlFEsjXYbqUS++244XMWaIBtnzaGxsvmTJ6rt373/+HP/9e+KrV1G7du1t1aoj6T1+/Izqfnt8QLp4+HBnxkIRJgxYun59swULVpK3fOvWvXLnQS+zd+8BzBsUPlj071diyBDnWlgvAAAAAAAA8I8DerAq4Op7okQlqag0QS1r1/piFSD8j38JfX2TpKSU0pPXr2+C/WLCqwluJhN2o58wmJpaF5dpCnhDHItKGUKPJNgUXq6q4v6K+FN6lDpvSk8RETUJCS2STRRDK8q4uC/V9trKIDs7R1mZbAlEL1QTJ4FRIg5cL69FqANRvpmZWYLnefMmunlz88qKQfLhSErWK/dGAAAAAAAAAFBFQA9WhXv3HmPJRhxz61BLq1bdKqIH6f1o/v4H+U7u57evPBchm8GGYk7pqE4FBZ38/AIhl3Dw4Ameu5BKeZJ4RZVQMdXykcPRp6JY23I3WnbtOqTa3lnZ5OTkbtniP2rUeFaZcjjcFDE+PjuKsB5cvXqr4Em2bt0hIiKNR6tVVgyqEi387NmLWlg1AAAAAAAA8C8DerAqXLt2m3Vg/fiRmpGRwXiXhP/lT61d68t38vT0DCkpTRxLWabHTVpa3drarkOH7oyCIfGN1MCBo4RcQl5enpKSXkUK5NXyh94XWcvF2aWlZfX0TG7fvmdu3o3UefT335+W9nPbtj0CRj1+/MzGhn0RFUoSy3/Vp09fqLUlAwAAAAAAAP8moAerwqlT58mv/6ZNOxTRxQW8eUSZMna3leshoszNe5Y1f9OmHQV6G1VEReVat2536NDx4OAzmpp6jDilt7mtX79dyFVMmTKLMbvO1R+f56OlZVw9b0toEhN/5ObS6X2aNrUg7/fDhzgB/ZOTU93dpzFKUKk6niQdPbtz595aWjAAAAAAAADwrwJ6sCrMm7eKaICrV2+jUw5HkXHnkTwq4kLoQQn0SU7+wXd+bW3T8qJPSd0KscOHTxbRWWgW4Am5TJjgSdKWCiYi4hX2D8oIp1NITCmHqiQcPFxeuJQ79Oq8vBZX60sTltTUNLIf08triYBuW7f6qajoMEq8ujQ1rQd9fXcLuC8AAAAAAAAAVB3Qg1UBR1pSbdt2K6JLM/hjUUB2jYmbmVmvWLERqzNlgb/86fDO+fNXl548PPwJnlC+PO2gQWTL1KmeRbhq3qpVa5ycxs2fv/DuXWETyzRu3I4xnndmlTISiirIyupqaho2bNjKyKhDy5Y25uZdLSx6W1ra2tj069TJrlOn/jY2/S0t+6DGNm26tWzZCXVDndEQNJBstyyFDL+dktJI6tZVcXZvb1rsz5zpXVaHW7fCzc0tGPvVq7BbkL8e9PMLrM31AgAAAAAAAP8goAcrR15eXo8eAyk62tMyPZ0u3qem1pRHD1KnT4cUcTNSChZ0algKid64cZd3/l+/0g0MTLE3TRiXkzqJEbW1rWTelWXL1vMYr8rkqEGINmzYvG9f+ylTpq9atfHo0eC7d++9f/8hLe1nbm5uRe+ChqCBaPjdu/ePHQtes8bHxWVG//7DGjVqyevW5EnLSRkbW1VuRVXkz58/a9duOX78dFkdPD1nMtYq10CoLb1/8ODBE7W5ZAAAAAAAAOAfBPRg5fj4Mc7JadzevfvJKZNYhkg/KYoSLywsTE7+IbSDDyGxdOlqNO2XL/G7dgXq6jZlvE5CKgiyY5EaNmx8JZaTmZnF4ciyJR6kpTUcHcfu3h34/HlEtT62Mnn16tW+fQfGjXNRUWnAowqp/fuP144BJWDLQRoZdbC1HbRs2ZrHj5+QFvSOzMxaMwGiBI1qdQ6qEnVfy1l0AAAAAAAAgH8Q0IOVo0T5eB7/Gu3VatWKjiA9d+4y4z8q9/e/Bt5sSHcWEVGvrMrglqTfuHFXRZdz+PBZDocOOq1Xz8DXd3tiYmK1PakK8vv3r6CgA61aceMwp0yZVVeWIE6ePCcpqfPy5Wuk0Ddt2rJly659+/YqKDTEpsmhv969HZycJmNhWF1KkATN0u/xzp37dbh2AAAAAAAA4F8A9GC14OAwDmsEIsooFxdaxbi6zmEahVRzqjhsUgxLyMr5m9RJfYT4+AThjY+JiSUBoj172v3+nVFjD6lizJgxl0jCOgybdHAY37mzHTkOC7vJaHYOccUuW8bd9SkmJo9fXNXFoAaWljIkXlT47Z8AAAAAAAAAUDlAD1YLFha9eKQfV8I0bty+Inqwuj60e3HEiMnCGz9
unAcaYmjYpnJrT0lJfv/+/ZMnz2/dCg8NvX7+/OWQkCuhoTfu3Ln37NmLmJgPqamplZu5b19HZFiDBq0rN7zq6Og027v3cH5+/vjxrnTwrroeFmu0887Z2Y3tpqpqUk0vWt7Y2AIHGNO+2mPHztTVwgEAAAAAAIB/BNCD1ULLlp0ZRYD0glh6esbv3+nY0ydd63oQfUQ4HGVkgJDGm5t3RaZv2rRTyP5RUVGHDh3x9l42YMDwpk3NpaW1sIQRo/ggjpOR1mvWrP3gwSOXLFl19Ojx6Ghh84U+evSczPL27Xshh1QjkZGvlZQakeO1aze+f//BxqY/uzDWkZqamiYhoYpXWvUXR61cucnPL5DcwtFxau2vGgAAAAAAAPinAD1YLbRt253Rg5SSkgFq2bv3MM+Owlr+0NlIzpy5KKTxpqZWqP/u3QcEd7tx46aHh1fr1lbEe8WDZPFTYgOvPBQv3kGmbVsbT0+vsLBrgu+IZCAZcOvWPSHXUlHCwm6OGTOuWbO2bdpYrFu3hffSjBkL0WtFB/n5Bei/Hz7EyspqXbkSevny5Rkz/lYk3LTJn1m1inBFFQXpQVtbBzTnrFmLyMLv3n1YQwsHAAAAAAAAgCLQg9WEvf1YVg+2aUMnk3F1nV13epA2Y8OG7UIa36ULXThj0KCxfK/GxsYtXbrS0LB1Kd+fGN7vhu4lp6pq4OQ0ecKEab17O+CShdL6+q3XrNmCPsOGTcBOUkVFRX1V1SYlpmjcuNmCBYvK8hguXboJ95JPSPheufcimHHjXEvYM2zYRPaqgUGbtWt90YGKiv67d7EuLrM2b/ZnrxYU0CLx0aNnzFBNEREtRhpXugIFpaVlTOZHct7CouOAAcNzc/NqYu0AAAAAAAAAUAR6sJpYvnwDo/6oiROnoZaBA0fxOMtUsUCQx5672lGI1LJl64U0fsuWXdhUztmzV3jb3759P2WKK4cjx8olOzsnb+9VS5euQ+pPXFyVlcA9egwmQx4+fEp69u07nLTcufOAtKxcuSkzM+vKlTBr655YN6nxFKaXGDnS+dGjJ7x3//IlUVqanrx1625VfT38mDTJi6I3JxoEB1/IyMj8+PFzq1YdKTqrJ53FJTU1TV296Y8f9M5HERHajKZNO9y+fTcnJ593knv3Hi1Y4K2tbYgT8oh6es4bPnwCsyi1ir9rSkGhITKGnT89/TcRngAAAAAAAABQE4AerBaOHTuNJQAtcE6cOItabGz68ehBJAYlORw1ZeXGuE9Ni0G6ch9SeUIaX1hYqKNjTFx+J05wK7DPnDmXiQulIz87dOj05Mkz3lEfP8ba2JAoWQotljSeOhVCWjp16k9aDh06SVpCQ2+QlgEDeJUyEU3cANQxY8YnJiajPi9evNbS0ieNly+XE1ZaCZ49i0Qz6+o2zc7+q++uXQtHjQsWrETHK1ZsNDJqT9rfvfsgIUFEsZSsrBJ5v7wcP36WmDp5shc6DQo6oKNDjJdkXKjC6kF5eT3hN34CAAAAAAAAQBUBPVgt4J1uUrgSgWhy8o8iOsNMJx7Vo0RREs7Objo6zXDCmZrWg3Q1hDNnLlfE/g+SkspE1CA7W7Xqgg9FKUqLoiNgrcsa2LixGa/6Cw4+z1cPiogo//iRVkQ7vDIkJevhB1XCZq4XEgmisWOn4dhLmmnTFlb+rZQNSakaElJMaZ49ewU1+vjsQMeNGpnPnbuSvfTrV6aHxwxxcVrvz5jBNenVqzfkYNu2PX37Onl4eCsq6rNDgoIO6eo2xtGz7L8JyDH6uiyRSGlrG4NDEAAAAAAAAKg1QA9WF/Xrt0S/55s1syGnhoZssQm6AMSAAaP9/AJ4FGJNf8Rat7aukLJITv7Zu/dA6i/qxHMnKioTH88tT//tW/K8eQtcXKa+fs3d8Xflyi3U1cKiNzktSw/q6XFrRkRGvsYyk1cUI3FEGRl1ePw4Yvx4T/b2amq6e/aUk+KGcPXq3a5du3Xs2L1Hj0HDhzvt2rWv3CE9ew5Ft0hL+8XbaGragaJLN34rouV81yZNjE6fPsfbITMzMyrqXWFhITqeMsULdXZzm5ucnKaubojWlZeXh14x6fn+fdzy5Sv19c2wHiQ+UAor6+743wTK8hFTZmZWwiwZAAAAAAAAAKoF0IPlUShkv65dB6Hf8zt3csVIo0atGfVHO5Vu3rw7ePD4WtSDtAAZP356hZY6fPgYvA9OhVeh9OrF3QmYnJyipsaN4ZSW1vr5kyumkL5Dao4cnzx5jnSwtu5LWg4fDqboDYZDySkTWKvMY6c0On/wgLt50MrKFneQ0NNr+vt3Vrk2s/GovMycWY5Xcdmy9aiblxe3G9KA/foNQS1jx3oU4YSizZq1JVMNGDCUb7WLfv0cyUKkpeWcnd15L4WEXMEpVcWx7CUrpaysut+5E16E05aW+hqoMM8cPW37cpcMAAAAAAAAANUF6MHSfPwYu3bthq5dbZs0aaWnZ9a2rbWrq+edO3cFj1JTa4RUTF4evR/tz59CXd3mjJeN6t9/1K9fv7HwqYVgUVZnIT0iKmRmzvPnQxUV1Xjcgn/1oJvbHNInMPAQ7qBCxOb69dtIu4iIVsOGXPffgQPHiYwyNORuviM7CsmmPISn5/xSaojiLd8wdeos3EEDK1ORLVt2CDB7yZKVuDOHJwJTHbvkqGnTZgsYmJWVpaNjiLrVr29mbGxJbLazG0auOjhMJC5KJuONlLf34j9/is2QlvazUyciXamtW/179x6ApC659OLFq69f41xc5rD61MKiKztw7txlxZ+AOnaYypGvCm+ZewAAAAAAAACoaUAP8vLpU/ykSVOJx6o0vXv3i4r6wHdgePgjit6yx83PmZubq67eFA+iBUViYvKVK9cZiVE7epDrIly+fEO5q166dC2zRJFSu9uoMWO4zq9Ll67yPo2goCOknaIU5eR0yfH79x9btGiro1OP9ZP6+u5BnS9cCCWn1tZ9edQQHSnaqFELXmO2bdtT4rGPGOFcOu41NfXn4MEjiFgrtR2Pu1Nv0CCHrKzcslYdH580cOBQcosmTZr5+HDLc1y8eA23yTEviyhTysCgWXDwmRKTbNmyvV+/Ye3aWTk4jEpOTvnyJeHOnftF9BcpoUED0+PHj2/evFNcXDYi4g07pF+/Eul0JE1MOsrJ6RGbkVos930BAAAAAAAAQHXxz+rB/PzCy5cvb968dcuWbaGh11JT0w4cOC4pKY9/q0tgR14J6EscjvilSzdLz3bu3OXbt++zp2lpPyUl62O/D2VtbYdazM17FPcK1ZIetLNz4rXz2LGTz5+/5G0ZPNiJLM/JyeXatdsiIirF3W2c+vWbsZ1HjpxCCzApWReXWaTl6FES/0lFRr5ju/HuWzQ0bIeufvwYR061tEyZ56BGqvW9e1dMYp87Ryd1MTU1d3efffbsZVtbe3RqbGz+6dM3tg9SXnJyOngeFaz+VLCMFcHBmWqMJFRClxs2bPLkSSQZlZCQdODA/hMnTqSnZ7NTZWSkp6amsKeFhYWKirp4ZvXiT5K7AbBHjz7PnkXy/0oVFf
n5BUpJ6RTRUa9937zhRpk+fBhBvMZFdDHHrxyOFP4HBzKzIpozIuLloEGjyfzCZ4UFAAAAAAAAgKrzb+rBc+dCGzY0KCb25PWwDqJIck4kVSwte0+YMM3DY56T02RNTeLsU8T/FY+JiRU8P043KkMm/PYtqaDgD5mzdvUg7d1r3bozMcnfP1BTsx4SVh8+cKVZVlZehw4kjyg1dOgY0vj+fay8vAqPYFHGImU3u7SYmA+JiUnsaUhI6ObNm7t372Ng0Ayp4BLPwdubjo3U1DQip0gV4sdCngNdFGP+/DVs55ycnCJc+C86OoZ3ksGDnVFPJSUN1suWnZ0zZ85ibLgCqVUhJqYpI6PLPGQNRsSp4xaxy5evX79+j33XcnLKDx5E8H1xs2evKFu2qzPhoyJeXrN+/84sPTws7KakpM64cZ7a2sYJCQn5+cX8mkj3qavr4Rk08INFHw4yG126f/8xsW3Dhm18DQMAAAAAAACAmuAf1IMnToQwykCJ+amvgLWbLNKFMjIaS5Yse/GimA8oIyPDy2suM4QyNS0nCeS1a7fIDVxc6M1327fXZmZR3o+ImZn1kSPBzZvTyU4VFLRzc7mOql+/MpE2JEaOH/93z9rFi9fk5FR49jmqY20oeuZMiID1Wlj0QfNoaRksXrwqLOzWzZvhO3YEdO9uR+bv338E6YZ0GePUoyNFmzQxZ2eYPHlmCcclITMzx8TEHHtsKVFR6fDwp+yluXOJdqNWrdoYH/8dCcnNm3fgBgke/6YGfrPiWAZqHDp0aseO/XQPCaX09IwS9/r+PQkvVkJgWK8GMUZbWy8ggE8iUze32azwNDHpmJPzN2D19OkLeKwYnl+F9Nm0iesQXL58IzodPHiMgOcMAAAAAAAAANXLv6YHExK+E3VQRg04SXl5lczMPL5jp00juVDowNErV64LuMu8eXSeE0vLXuS0bdteeGBtbh4kHxUJCW2iO9TUGrBxktnZeU2atCLts2YtYs1evJhsJJQp/nC4G+icnMaHht54+fJ1dPT7rKy/3rGAgAPE7UbxATXKKyvrWVn1XLx4jZ3dSOy/oyNFtbUbJienkRkCA49RdKEHbuqbjIy/k7dt2xXPo04kJHp3Dx++YK9OmOBy+HAw75MPCDjEvCP2aRMvIfX5MzfidPPm3Xw9cW5u85h7CX6qrOeR6tSpx4MHj0vMM2fOAgkJEnhM/f5dTHUiGyQlaS+zoWH7MWNcKDrvzRz26vv3H168eC3gewUAAAAAAAAA1cu/pgf79h0p8Dc//Tu/Xj0TdsNXCdTVG2P/DjV6tEtZt4iIeIlki7091yn28+cvERElPKr2/YOsH0ri3btY1sI2baxxI2flyk28lu/evZ9orlJiWYMk7cRII7WlpmbQunXXdu16qajwht1KiYpqcjjopnKiohpM5G0JNPGEnI4de5CbIvUnJkarJyTTtm/fbm8/llQARCxYsAYPUeZJ7UKJiclFR8cJeMUjRkzBo3jtp4YMGc92yM3No+hSgN14R+Xm5kpKauAtn0LKdu4WRYSn5/Rfv4rpvm/fEgMC9j58yPVmPnnyPDk5lRxLSNTT1zdu0IDOokPUa9++gz5//iJgRQAAAAAAAABQQ/xTehBv66PKK/pA/8g/cOA43xnmz19FJIC+fquy7nLv3uNLl66wp/7+QaXkSa19uMUXgoMvsvb06sVNqrl0qU9p4wMDjxJlV4ZkVsATclh1p6BQf/x4d7TGCxdCX7x4lZDw3c8vEHVYunTdhw+xN2/eOXDg6MqV621seouKsvJQHE8urqWl7++/395+HKMTabZu5e5VfPw4spQlaiTPj7KydkpKyb2KvNSrZ4jHsqq2WJ7VgoIC1NK4sTnvkG3bSEyvSsWfML2BUUurwe7de/kac/36Hfr1qzZGxytWbGrZslNiYqqWlklcHK0Bly3bgK6+fv22rLUgoXrq1JlZs+aPHTt13DjXmTPnHj16lDcMFQAAAAAAAAAqzT+lBydN8hLoHGRFBzVlihffGY4fP0Nki4SE9s+fgiQJy4ABJHVk7QeLqhLnoKenN2uMh8cCVsq1bNm+oKCwiK4B0d3CogfbZ/nyTfi6fKkHpc56Cdu1s/H19Xv3jo+KIQUHd+3aW6I9Pv7r0aPH+/UbIiqqWNxjyMEPh1aL3t5rSec/fwrV1PQZTadCtuwRq+rVa46NF7SFMyrqA+4swkhCqlev4ezVx48jKLqu4nzeIe3a2Vb2Nf0NH7Wx6Xb//qMSxuB/haArmHTp0qNlyy6k8fLl60iWIvlsZmZ9+TKfjLWI9PTsBQsW6eg0pEqhra27efN2AU8AAAAAAAAAAITh39GDubl5EhJkK1y5v/CpceM8+E5y/vxl8oNcRESdDWsUQHp6hoyMFo5CrH0xSEdXGhu3Z405ePAUMd7BYQJxdJqZtRs50pU0urr+LeDOFIVX5lGC3NjIoUMdb968xXexMTEf8F3okvSbNtF15Feu3OzgMEJNrd6nT/Fst7i4uHnzFqqo1GPEIPeZT548k+0zYMBIXvkjIqLYs6fd0qWroqLe5ecX9OpFF6EQELKL2LfvBB4qx3gVOdev3yaXrKx6oim7dOmVnp5DWmJjP2M3X1USwP59RC4uHikpabzGvH793sSkBUU7JZt7e3ujR1SEv5DKyo3JgyrNhQtXtbUbMA9Akdn0Sp4YSXNKTZjgefXqraNHT5w5czYqKkrA0wAAAAAAAAAAvvw7evDEibP4R7QS4zgTEBlIzZvHvyx4cDA3N6mUlM7Pn7/LvenJk+eLC6ta+6hjn5poXNxXYklMzCeSFsbauidp2bkziJEYmkRirFz5N4LU1XUOvqTCJHKhOnfucedOON9lrly5qXNn27AwOsfOoUO0ENu5c19ubq6+vpmWVj1z8w5IfE2bNvPu3b8lGn/8SJ03bwHj+EMvRWTWrAXk0vz5y8kd1dX1R42aFBS0/9OnYhsGY2JiSYfAwGMCHv6UKbN5loD0lBKSkG3bdmcakTozioujheru3QeYxqprcDp8VFVVe9u2nSXsWbNmk4YGXZVyx4696NTY2GLWrCV8Ld+xg301XM+jhIRmmzbdLCx6Kys3Yp6YClUMse7de/FWgQQAAAAAAACAcvl39GDnzoOYH9gkbYh0GSlGaT3o5xfId5Jdu7g/1Bs2LHP/IC+jRrng7rUfLEqHX65d+zek0MCAm1B08eJ1bOO9e0+0tBowFtJb8/bv526cnDzZA+tHFSx+FXft2lN6dZ8/f5882V1JSRep46tXuU7DffuOoCHXr995+PApOkDSZubMZQMHDjE3t7C3dxo61OngwcPsDK9evbG27oYNoP1fnTr1Pnv2MpKWLi5u16/fyswsWRKCMHUq0aroJYp++vRVwPO3sOjB89LZoFM1NshTSUkzIyPT358k0qmWJ/83fLR9e+vw8Ad8DUOydNiwCXwvMW5ccVLv0sDALCAg6Ns3rjP69+/fISEXJSWV8ftSxkKelDIkdRjl2fqSAAAAAAAAAFAu/4geTEpKwXvfSGU6Ws1ZW/fDP7lL/55Hv
6s5UVHRfOcZPZobXSlMnbi8vDwpKW2ekMha+9BrbNq0LWuJl9dSxv9FxzS6uExjL6Wlpffo0QdflcMPROTEibNIqjAtlLV119hYPtkvX758Qx5Fp062v36ls+1BQXRGmoCAg+npGV269HVwGE/RpTe6r1ixzslprK3twKFDHfv1G3zt2t+g02XLVvP4uUQ+fix5u6dPX7x48Yocx8V9wd1InhnKzEzQRsKMjGwVlfq4P1/tT89gZNShQ4deeMJqfAXqbHrVqVPd2OSihP79R9jY9ONrcGzsJ/IQyMP38JhRus/9+8/ExSVxBz7vvVWrbqWHAAAAAAAAAABf/hE9OH8+URya2Ici+/jxcyurPvxcQrRAMDGx5jtJYWGhjEw97JaiNm70K/emly9fw7dQrD6VQepHSOI5Bfgc6awvT59GEjMiIl5jM0hWVa6PzMamR2rq33jXWbPmMx43GUaX0T3d3IrpkcTElLNnz+bnF/74kda9+5Djx4+lpZVMqkP04KpVdOjp1q2BxsYWlpZdoqNjCwr+bN68DUnR0aMnDBo0zMqqx8SJbuyoCxfCxMW5FQy1tfVIvs34+ERy9fDhYNSBHA8c6My4/FSJvF2/nv8WPEJk5FtmRXwloYbAq1VXhXQmGRUVLV9f7hdm3DgPI6P2ZVnbtm1PbAytJVes2FC6Q1ZWtqxsPZ4nwOfVR0S8FPBAAAAAAAAAAIDlX9CDhYVFior1WSn0+vX7jx+/ML+6S6gtWpJcuXKD7zxHj55mhJLou3cx5d532LCJZf9ur9xHWVxcW1e3uUAJQ0taW1tH1owmTdowWg9pYc369ZvLyuqShTx48IztdvLkWQMDIywolElFiXXrtvAuJz09w8amP0Xn9rR89OiZtXW/y5fDnj6NuHfvwcuXr/LzuRUb9+w5gPUyHaq6cOFqFRWDLVuKxZpu2ODj6Og8bJhz9+79LCy6Rkdzn+SbNzGamtwMKtLS9UxNO4iLS4WHP+EdGx1NEodK8yxWHAnkjx8FBUmeOHEBj5Kri8Ddv+GjZmaWDg4jTU2t8vLy+Np5795j3JGuxjhq1NQy1sK7DZb/2583b7mApwEAAAAAAAAALP+CHpwyZRYjoMSvXr2DWho1aolP1Yr/bqf3yo0YMbmsefT1WxGhZGnZv9yb/vnzR1pap7qDRRUlJKR8fPzv3XskLq7ITxKqYe+hWELCd2KGj89usnJJSWVf3x3x8d+QYampP48cCVZQQP0pP78A1uauXQdig8VQ+759R0kjkn5ubl7Pn7+8ceMuau/fn878uWbNlsTEZG/vxe7uXuPGTXFyGufoOHru3MVIfB8/fpZXD+roNPP2Xlni4SQn//DwmGVrO2zIkJEGBi2PHTtN2hMTUxs1Msb2ihGzZWXV0a3ZgT162JdaNe3zLVFcvjSrV28WKKNq+kPLtFatOnt7L83Ozi7LyMGDxxCdLienXVaf+fNXlPrqqhbPj0RZWPQW/DQAAAAAAAAAgPCf14NsBpiWLdvFxNCrmDjREzfwJpPkisEOHbqWNc/u3YfxKDqc8urV2+Xe98iRU/xckFV3NknKyiokJ//KyclVVFQvJY7olgkTuMUTMzMzRURo2Sgjo1baoZmb+6dhQ1p8kWKLK1eSsoNITlIBAX9TvpD8MK9eRe/cuU9BQX3SJE85OaXExKQSsxUWFllb2x4+fDIy8g2vHtTWNlm0aDXfR3Tt2u2+fR3HjHGtX99k2zauDzEl5Zemph6zLno5c+Zwc71GRLwiGpH3lRFI1YZr18J37/YPDb358OHTZ89eoE94+KOTJ8+6us5s3dqSSb1S+2KQbFk1Kfc7o6pqSJazdOnGsvo4O7vhLiXEpjx+LMQTTWlqNiv3XgAAAAAAAABQ9N/Vg/Hx3wIDg7p2pTcJNmigt349N/Rxxox5+Oe0Ao+HhbvJq3Pn3n/+8J8tPT1TUlKF5MBs0aKzMAb07u2Ib1SNwaJ/xQVFcX7+zMjKypOVVS4uCSU5HNmkpBRig7s72RVIXbp0gzUsOvpvSYIvX74Rj6ehYXtmZyK1atVm3oV8+BAnJqZiZ2dvbW0nK6uupFRPW9vI1XV29+52kya579178NmzyFevXt28eWfVKp8XL16Fht7g1YP165tNn84tJEE2G8bGfkKvxt5++JUrN968edepU38Xl9n16plu3x7A3PELs5dQtGnTtnl53EhUZABu1GSVoJlZh/nzF61duxVdffjwOfUXURzWK0EVQ6Mu4kVJ4Q+R9+9jBX9hcBENWeIfRI+orG6jRk0ppQelDAzaqKgY4Nenhm8n++kTnxRAAAAAAAAAAFCC/6oe/Pz569q1G93dp505c460IFlhZzcE/5aWY2QaUQc0JRKnlKB7dzKQ3tj14METAT0JiYlJHI48/mVeU/4maWmlIjrw8iePJKS9aaNHuxMbkpNTSA2Cli27MC0/2rfvgiVte/R8SGPfvmzld1oUjxgxqcRakLJev34TUw9diTyE0piYmN28SVcnDAg4yKsH9fRarlixcc+eAx07drlyhS5QSDYhEpYt25Sfn49a3N3namkZHzx4gtz01q2HpEOzZh1JC3rsuIGb7qZJk+Z79hSrCaKpacAIJQUsrGTwR67uYkTJh347GzaUrEVYGqSOydK0tAR597y8FpbSg5Sd3ZhZsxbjdjVyRzYEFwAAAAAAAAAE8F/VgyXYt++AqqoO/sGsjHUTqUBHY2zc/NKlMAFj0Y953JEOvBw71lOY223atIP5cV5DKoOWhHp6xuheCQnJMjJK+HZI/YmxmVVcXeeSBa5ezfX3TZo0nbif0N/w4RNJI7Oxjp6wQYO/MY2xsV9iYz+zpwEBJFyWdiAOGOC0c2eQgoI+uh2Se8rKOuvXb/f0nC0pqYl6nj59gVcPmph0XLRoTcOGdPXDwkJ6qqNHTzk7j5eSIq9DJi8v79u3xHbteiBJWL++6fXrd8gd163bTuwfP55+5oxzkDI0LKkEi+jUPRNKqSSV6qgvX8UPrdAbNRKqVOXp0xfJAq2s+gjo5ucXWGql0ioqui9evNLXb8G+yunTFwpzUwAAAAAAAOAf5z+vB+/cedC5c3fGH0WcaNxoQx2dhuvXbxI8/OZN4qiifWeamgZC3lRfv00N60FVomfJhsfY2K/y8rSfqEcPB2JAbm6uuLgaySm6c+c+0mht3RdbRYuUbt0Gkcbp070ZLUm9ecPdY/jgwRMtLQM9PbN585aamnbU0TEilR0Q3bv3/v498cKFy8QhJSGhnpr64+nTF0OGDNfSalpE5yk9x6sHmzZtN3v2ksOHg9Etdu8+UEQHRn50d/d0c5spKqqFenbpMhg1IjnTq5f9tGkL27btER5+n7i3LC1JTRDpkSMnof+ZmrbatSugqBSHDp3A3RQYmc+WkGCRqtaqH8J/aFfdkSNCueouXAgltnbubCegGxLpWNFLllCdwcEX3737iCeQpSiRxo3NhbkpAAAAAAAA8I/zH9aD2dn5U6e6M4pACcsEbqyjrm4TpASzssrM9Ej4+PGLmJgs61OLiIgS5r537xIJ
KV/DWoObUMXObgQ29aucnNrevdw8MFu37sE20B28vBaTxosXw5inIYZkMmls0IA4lejQTXYJb9++b9HCku1MawxZ1fXrt4eF0QGfSGwipampqdu5c7/Fi+lcMZs2+aE+CxeuQcf79x/j1YNKSo1CQq6iR03m2rDBj307oqKyJAw1MvINahk5cnKzZuZIvaKWPn2GoZYfP34SoYrYs2cv70POzMwZO3aSv38QOl671pdHfdMq2NTUcsOGbatW+QwZMlpX14SnqGJZyNSMeBeRktJCj0uYrw0uNkHv5WzRwkZwTzs7koaUd2uqmLp6Q3Rp6dIN7Cvbv/+kMPcFAAAAAAAA/mX+q3rw1q0HenoGjG9Ik00t0q6dTVDQATZFiQB+/87S0mrEOJ4of/9DQt66Z89hpX6u15wkpN12Li6z0H2/fk1MTeVWh9fTa81IJBFVVX3WtufPX2/fvpUVthcvXiePRU/PrIjWaH8FckTE63PnLpLj16/fpKdn8K4xPPzRsWOneFtiYmK/fIkvYvQgkmPoeNmyDSYmHfv3H/ruXcyxY6R6I/Xo0YuCggItLV0sW2i5jRQ06hwVFY2OJ06c3rVrf1bYrl69FQ8SRaNIS0rKDx8fX21t+tWQMovfviVi9S1GxKCqqh6vYYWFBe/evbt48cqGDVtcXb0mTPBwcZk9dqyHvf241q27aGsbKyg0FBFRrZngUsrCwlbIrw1aBYdD+3wVFBrn5OQI6Ikzz1BYPGrwfBPQ12A2ujp79iJW5Z45c0HIuwMAAAAAAAD/Jv9JPXjgwEnmFzH6wSxCHECOjmNu3Lgp5AyFhUVmZu3wQDrkb/p0byEHxsV9xsJEqubFIPmQWgPFkoJev34HW67IJIGhxo3z4Gutri63wMGNG/fQ6e3b94YMGePnF1BIdvoVFb179/7p0wgh105AUo7Vg8uXb7K1pdXx4sXri+hQ0hCKzpfS2NTUmnk7VNu2vTIzs+7effTnT6GJSQfUMnXqLCTWfv36TSbU0aGLYujocLOsREfHMC9XgsOR+/EjDd+I7IKk/aHq6k0FW3j16pUDBw5evBiSk0PXhc/Ozlm6dB2WV9UuCanu3QcL/+h0dEzxKiRjYj4K7rlnzxHGD6jB6EH6Xy38/Oidldev3+7bdxD55k+c6Pb1a7zwNgAAAAAAAAD/FP89PRgWdofxntDJT6Sl1WbNmvv2bXSFJrGy6oknoX/SDxo0SviBDg6Tass5yCsJ6aDKoCBuck4iwdDSxcQ0+/RxJD44JIc/fvybH+bBg0etWnUgssrSsh9pREoQnY4c6UpOsUqiFBR0goKOoNP8/Pzz50O3bNkyb95yefl6SGwaGZlPmjR92bJ1np6zZ86cFRx8HnXDWwWpY8fOoANpaVkPj3lSUlp6egahoTfQVVLOHqNJHq+n53wkPzU0DJ48QcKzcO7cpW3adOvVy37GDG5GlFOnuIlWjh07S1pGjXJl5aSj41TSqKZmwDx5av16P75vZ+NGP4oHDQ1dsrSnT18w2r+a9aCaWjnilBckHolh+/YdKbfzxo07eP7R4+/XYM0abtxvamra06dPHj16kpmZKbwNAAAAAAAAwD/Ff0wPZmfnSEtrsf6jmTPnfvv2vaKTDBzoyDqbrKx6CT8wNvYTdtlI16IYJB+uG/TqVbriw7VrtxivH52rMzk5ddeu3aRFWblR06ZtTU1tmABa2qn07Fkksf/Tpy9duvRjK6ejS66uXq6udMXGhITv9+49at7cKiwsLDT01vLl6/r0GdysWQdra9uuXfuNGjV+9+7A+PhvaNTBg3R2l92797948bpbN9sxY2jtZmHRNTr6A5kWpznlMOKLcnb2JPdydOSWutiyZZetrUOPHkPevn1PWpo0ofPz8OZIqVevKfZ+0nl+oqLoboxsVESvQEREMSOjpAg6fvwseQjjx89Yt27bmDEe5HTatAW5ubmSkqrFk7RUy4d23YaHPxTy+0P0OMLaeoAw/U+ePC8vT5IjyWA9qEEkoZ3d0MTEFCFvCgAAAAAAAPzL/Mf04OrVW8gv6k6dur98+aYSMwwfPpZxFVEtWlhUaGzfvk617hzklYQIqagoWnbt2LEfyRzWsBkzlgwaNGLTpi1eXosUFRsQ7yH20FHt2/dmu/3584cUbujRY8Dr19EcjtyiRasUFbVIrpIiWqnt2LLFd8+efSdPnjtyJPjIkePovydOnD1+PHjduk1IxxXRwbrHsZeKjl/19Q1o376npKR0ePjfoo3OzlOxARxcHFAazY8aZ81aKiKixvZxd5/bvLmNlxfXRXj06BnyWomT8dOnzy1bWuLsqfSqO3Tg7tFDQxjPIy36SrwdHZ1mWJ09Zlu+f0+ysemBGn199wwbNhmPrd6XQjsr+/UT1r+clJTChPhKfPuWKMyQhIRkJ6fRzD+AkK2y9JZSKSnldes2CnlfAAAAAAAA4J/lP6YHGzem86jMnbuocsNHj56Cf1fTv6jNzNpmZeUJP/batXA8Vq4uxOBfSaisrPX9+1/fUExM7IABI0vsf7S1HcQ6By9dusp7CUnCK1eua2k11tTUU1BoiN2OHCurfl5eiydNmjZqlIuhYZvmzTsMGOC0bNmakJDQqKh3X77Ef/wYt2WL39ix7kV09Qd68yabX1Rb22Tjxp3v3sVcvBialvbbxsaWSBc5OT0cyIoeFzVkyJgiWolPnDVrCTEDicpevYYOHDgqNpb7xVNTa4J6tmzZxc1tDqm0yOhuehUkVPXJkxfMK6D9hu/efeBdGpJaUlI6pV+curqeqqoBunvNaHlkCSc6upz9gCwDB5J/jqAcHacIOQRx48btgQOHkvIiTJ4ZGhOTlocOHRV+HgAAAAAAAOBf47+kBxMSvlMVyf1SgrFj3VjPoJFRq+zs8nOQsmRl5cjJkThVjeoWFLwfcaxVBVRGoB1S+vrNWMOQEHv8+Dl7SmofREfHknhRbe1mvKuIjHybmZlFjt3dZzCaUZPNS0NRkoqKjXhOWcQGDXIkA4OCjvLqwYYNW+7ffwzHiFJkoKGh+ZUrofHx36ZMmcmO9/HxR/3HjHEdPdqFzIPeY6NG5nPmLCWny5dv5LmdJI9wox+7iooBUrJFtNQlsb50o7l5N97VaWmZcDgSpd9dQABt8NSpXpKSWjVQJYSWZlZWfYX8IjHZcujCE2xKVSGJjHw5ZYq7ggIbL82trtKzZ5/Y2K8VmgoAAAAAAAD4R/gv6cFdu4JatLCq3Fh39zn4t7MiRRdfMExPz6rQcCaFS81FiiINqKihYSQpqVNeGky69IC1dckyBy9fvunQoXtc3JciXFeCw6GV4+LFa9kOJ07Q2+sGDBhz+fK1mTMXTJ7sweEoYnXMrQA4c+bC7OycnJzsDRt2KCjIy8o2aNOmm4iIfNOm5pcuhXXtakc2AJJ4UVYPGhtbeHrOx6lppERF5XftCuS16tixk5069RATo3XixYu0p3Lp0vXDhk0ownlKLS1tnZ1dCwoKirhFJcRLbfFTIx7GLVt2kZyouFy7CFOAnjpy5Ax7r6AguhDG0KFjSjw
ZHx96c2VExCs7u1E18xJpD2ZIyNUi4XB29iQPXEenArloWJKTU7Zt82vb1oZXrsvIKN+9+7j8wQAAAAAAAMA/xn9JDyIlcvHitUoMnDdvBeMLQz/CGyclpVZo+P79J5nhNSQGVbFbUHrTJr+mTdsxxSxUsOQp7StUIy7OiROnE/O+fk0g+WTs7ccQ0fT8+UuSfyYu7m/G0cOHg8XFFUo5/mh27PAPD6fr1yNpZmnZbe5c7/btu6H2c+cu//z5IzMz686d8J497ays+hQx+WR440UXLlyFjvftOx4aeou93e/f6TNnzp84caq9/Xh19cb4PiIrVtC5MdEakThFytPDY17nznaHDnHzpnbs2A93Uyv+WKjJk714X8eIES64myZFcaSlNT5//vvV7dPHHovl7qGhN1NT0379+h0SEkbRSWjV0dUzZ0hGGqXqfne0i1BJqYGQX6ecnFw5OR3y5AcPrkBu2xLcv39/7NjJ0tKqZCqk7kltDgAAAAAAAABg+S/pwaysijn1CBs2kLz9tJtJQ0M3Pj6pQsPT0n5xOPI4wK/mnIO0L09RUT85+YeRESkSocrhqImKapThK1QjsYIbN+4souMhD6Hj/fuPsTZ3707vHzQ1tSmxFh8f/969+yFBt3fvkYEDhzdvbu3i4nrhQhjbIT8/X1m5ETZAokePHkhCFuGcro0ataDoGog+RUw9elYP6ug08/ZeWeJGBQV/TE2teBSnLEVJtm5ts2TJenv7YUjfEZ+gt/cqA4M2SBWSUVu27GL0oBI70ti4HWtbZCSdQSgpKRlHw5KYUkpKSt3aunNgILeCw4gR40oL3ps3H5CrZmbEqpoI+qXGjZvG9yuUm1vye3vjxj1shhj68/Kq5GZYQkLCtzlz5pOvN/skAQAAAAAAAIDwX9KDleDIEVKDQBp7cDQ/ffpW0Rl69hyKZ6iiGFTDyk6qlFKRwu20vy8w8FARvQnOGLerIDHYtGnbskvmqZMdgk+fRhYWFnXuPGDt2jXJybR7iImMpVasoPNPsqXn8XH5642Kit68ecf797G8jXl5f9at23jpEu2cLbF/kK8eREhI0Nvcevcecv/+k5Ej6TQ+CgoaQUGHYmJifv9OJ6US/fzo9KRIHiKth05fvXqD3ZroZcmpqDQYPtx51y7/X78yyIQfP8ZJS6tdvXq7iE5NM5mRdepMlhVu9Y0iukLlTVdXj+bNO7Rq1XH27DkxMX+/21+/fuNwZGtAEqqRKpDXroXzPoSDB0/a2HSRllbX12/m5ub56VMCe2nx4vVEd6O/5curmikUvbV69RqjZ1zFeQAAAAAAAID/GP+yHgwPf8p6YcTF5Uilhgpx6tQlPENVIkXVeWSgnKamkaFh+5YtOxkZddDSMiJuHYoOcaRDMbOzc+TkGmBNJKKtbTxp0gysF8raTkiHKerqmhXhrDK06JVWNTa2YPXvtWu3ySp27tw3dOjIW7e4UiUhIcnbe+miRatKLPbDh4/v38fk5OQmJ6d+/PjhyZPncXFxSEC9f/8hIiLi+/dk0u3/2DsLsCq27v8P3R1iAAooopSKICploKgYYKHY3YmNiaBii40dqCgloKIgKiqoSNliYYCUgoSCwH/P7DPDcDgcDnHv/f9e1uc513dmz+457/OcL2vttfjoQaQ3d+8+dPjwaXR9/XoUUmoxMWRuvtzcH6KiSnilSkq6Wlp60tLNMjOz09Mz0BodHEYFBFzFPejqWqA6bm6rf/3K55peSUnJgwcPk5OfV5AJKb5Qr1WS1mKkPTEk5Log7zQ6OpZ+HY1r8CVfh6ysxp8/nKC1vr5+XOJfWFji8uUwZiY4dSMOwrNx4zZBJs+HtLR0Y2OrrCzISwgAAAAAAABU0mT1YE7OT0lJnMubtB/FxibU3qYqZWVlsrLNGyAcVHHsF6Tphg8f6+t74vXrtzj+J6ak5A8qOXjwqL19v7t3SYfGDx8+CQlxUgn4+QVgz0y+Q5A6KCwsArW1shpCyw60ahGkK4uLiysoN0vqTCLx6lUqHtfSsh9BepN2mz17UWYmqfKuXr1+7tzloKCQ06f9fH1Pd+lih22XJiYWM2cu3LvX59Ah39Onz+JgNXz1YLmyMnlUsFWrjqamtgSZp56T/fDhwwQVFbSZwp064c5l8NnGdeu2du5st349Rw1hiTRnzgr2iygvL6v+doYOnUi/GlJpTp1KZr1HGtPf3//ChUv379+v3oQhJuZJs2YtqebSjaoKydc9YMDoCspjVkSEfDtbt/q8ffueMpJOx6/n8ePKsKJOTuMYSbhkyeqapywQL168/v49q4GdAAAAAAAAAP9LNFk9aGbWi7GUhYTcrL1BNaZNW1KbHOP/URAWlp0wYUpSUrKAI6amvseSYcuWPeh2/vyV9AQUa7ASkgfoHB3HVJC+l8dpMYjKib59nXGfSIEGBoaJiEgfP34W3f78mU+QoUTXubltQhdJSaS5bfr0xc7OY8LDbzx48Dgi4vbGjd79+g0xN7e1sOjt7Oy6ePGq48dPMfnT+fuLbty4vVmzVvLy2nJyms7OI/BCMDk5effuxbx//wk179FjEC709vZBPaxa5YFvUbfoabNmBrm5P6Ki7i5YsLRDBzMtrfbz5i1Cbdl79ezZS1rQkYFGv30jj4Xq6JgyxrhOnbqXlvIQkpjs7DxX14l0XRFazTVcD5KTuXPnYWFhEboYM6ZKkkFvb3J1zZu3Zzvxjhw5gZGEEyfOEvCrAgAAAAAAAACC0DT1IBY7OOjK9u0H69FDSspLWk7WWyZIi4tLPn6cIvigP3/mOTo6RUZyhM+gQWOq+huq8DJmEYqKOqhycDAOnonPGxKzZ7sx3T59mqympokKAwOvhoVFEuSxvuFCQuI9evTFFfLy8rZv3z1nzoLp0+ctXLhiz55DQUHhMTEPkFp89uzV7dsxJ06c0tXt4udHhpfBetDbex+63rJlr5KSzs6dVXb4y5fvNS1w8WJ3GRk5WVnVb984Bznv3Int3LkXM9sHDx5hlScnp01Uo0OHLmvWbEhK4mypiUkv2kQob2BgaWU1iH7pStgR19Z2EP8Nv3fvgZPTKDExBdYgyg0ThmgyYoqKmlRsHKEVKzy4RrSzG4zGiIq6xy6cOhVnxiQdmwcNcuI/ZwAAAAAAAAAQnCaoBx8+jGcsgyNHTqlfJx07dqc6aUjUERxjRNzLa3v95tC7NxkmdOzYGWfOXJw4cS51rrB6eBkxglCoIIXVA1oPkl6yu3ZVarRfvwo+fPgwahRjDmuG/6dv3xFDhrjq63dt3767qalds2btlZV1jY17TJ26ePPmnYcPn/D3D756NTwoKCQq6k5ERPSrV28raD24fbsPul6yZH3btubjxk17+PAJzyVcvBgwYcKcgoJCfIsmqaio0atXpeRZudLd1XWmi8v0N28+4KlKSmJPTiFK2amyNlOO0WxHjpCxd+7fj6PuJHB6+qqSmZOVA82w1n3+8uXLnj0+zs5jlJQYEarYAD9S0uNXTIxMh9GpUy+usYKDr6Onq1dzR+BZvnw9NS7p29ytm3VxcWmt0w
YAAAAAAACAWmmCerBFCxyik2jVqn39ejh27DwtChroQKiOjT4DBw7FDo11wsTEulmztsytgUFXSiVxWyHREL9///nx4ye2rGGXxevXo6p3iPSdtbUtZTtTZmkoDsOGjffy2j18OHe+BjMzu7VrN65d64U7wXrw2rVbOIjNkiVrCdIH0iAmJvbePc6Hun6IpCLTyaxZbki+oXI/v0DUQ1BQGKp240YUZcUbPGrUFGlplb9/SfdOuhUflU2IiMh+/56DKq9Y4cFSgqqUiFOmS8RwJJ8HDwRN1P7z589Tp85062ZH96BWX1uhKjU0KWCRAGQPcfv2fYL01+WRY2LnzgPMdhkYmGZn/xRw2gAAAAAAAABQE01ND3p47KZ+UJNCICHhRT16KCoqFhdXoYxxjRJphBPwRF5e9cwZvzrNJDAwLCQkgrnt02ckL6FEqr/Hj8loOa1amdJrl8bRY8rKyqKi7lla9sYxTD59+nT06CnKoKZE/Uu0a2duZtZz8GCnt2/J4KtxcfFBQdenTp1GkEE+1/v7BwUHhyYlPffzu4xKnjxJrKD14IULgV+/pisoqC9YsNLZebKlZR8DA8v27buxP8rKuh07WiK5p6amq6HRAVUwNrZGt7a2Q9AFqmBo2NPWtpeHx84hQ1y1tfWxHrS0dOCrB/GWEm3bGuNUFAcOHGvWrBUtpCQkJZvPnr182rQlzZu3x0Wqqrp1/Q5cvOivqalHtZZvwNeAzAkiLCwTFHSV6XnoUNIHOCrqLs9xz5zxZyShpma7799z6zpzAAAAAAAAAGDTpPTgjx8/RUTkcWgOpAjq18nMmcuo3+PqlLenXCOpQk7U0NGjXbFhq658/pwhJaVIibjqnRNz565Cdc6fv0INIiQtrfXzZ14FGcK0dNYsN6pQlMltwbBv3wGuUQYPHovKcfb5CRPm4MIlS9wJMleCzLt3Hyqqnh90dZ3TokVrPT0TJ6fx06Yt5PosWrRmzJgZZma9Fy9eO2/e8uoV0Kd79/66ukaysqqHD5/Fww0bNp6anTK1/7KUIlOgP4rUh+MT26qVwY8f5DILC3+jtXTvbovXNXHi9Bs3biLxGxl5Z8wYMoDn2rVb6rrhxcV/XFxwpBepBrgNq+MMgzo6puPHz+nYsRu6dnAYwWfcq1dvMhkVtbTa5eZy590AAAAAAAAAAMFpUnrQzW0D9TtaXFpavbj4dz16eP78NdWDLPUzXk5JSYeSIQ3Xg9hahKQNoaCgfulSYJ1mlZj4XFOzLe3BWL1nMXFx5cLCIlTT1nYIpSNMysoqQ2taWQ1q08bQ2Nhq0KCxmprG2tqGZmZd9u49Un2gZ89edunSvX17s2vXbn75wkme/vFjmqmp+eDB43HWeKwHd+4ktWRmZnZwcKiFRc8HDx5lZ+dU/+zadVBBoTWqxvMp+qxYsUlbu11s7COczAIxezYWsEqUiZYPpLy9e/che/6RkdE2Nv3wYyOjrleuBKPC+Pinfn4B7A0RnA0btlCdydT3zwISbdqYLlrkLiFBClghIcnly91rHTQ6+iGzyLZtTes1cQAAAAAAAAAgaTp6sKSkRFKyORYRa9bU2R6EMTa2psUI4e7u2bGjNWVZaxQ9yKhCkvnzBTVf7t3rS4uDmiQJeaRu/Ph5qHJ+fgGShwoKOvVbviBERd1Dw509e+kf6n/PniOUpDWOjY2PiYlFw928eTss7GZQUPilS8Hnz185dcoPyUz8ju7ceVC9h6Cg0E6dLPGWzZu3onqFOrFixUaqp/odJlUUFiY2b95dQRoxfwk+6I0bd2irLtG797AGLgEAAAAAAABosjQdPRgYGEb9hBYWFVXMz6+Pl92xY36MGFy6dAMqERaWomxDjagHsSSURkPY2NgXF5fwn1Jo6A1aDPJxWcTRS4RwMsHNm/eg65UrN65a5bFy5Sb02bHjgIfHjo0bvdeu3bJt276tW/dt3Ljd3d0LP+X6eHnt3rRp++rVm5kSdI3abtmyBwnkNWs8HR3HEoSUre1g1P/y5RuQXEIVli/fiK6rf9av34pG5PkIf1An69ZtYZqjW2trMm2EqakN/53Be8hTD2L27z9Ciamhdf4eVKNbN/vaXgGfV4O+P0RYGI/wPvw5dy6AGpQ8Cbtx466GrwIAAAAAAABogjQdPThixCQsnOztR9WjeWFhsZiYMmWREUGKEpUEBFyj+mtcMcjIBNKBsGNHs8JCfn6tffoME1iJEPv3H0NNcNjPfxju2KSNjVDnztyZGtgkJKTgenz0IOLJk+Rr127V48vARX7+L2lpRUqa1eNdq1NRYSXT0+scYHbJknXUKkkrIXqtDV8IAAAAAAAA0NRoOnqwdetOWCP4+p6pR3NnZ5xnAf3sJ8LDb6MSCQk16qd4Q7KT85eEpJ+nqakVn1nZ2g6mZlWrHpRBU42Li0dNjh49Y25uvWfPwV27GvTZs+ewj88x9C+r8BD6V1y8mZCQ0okT5/z8rty+HRMVddffP/jcucvoc+NG1OPHyUij+fkF4BKuz6VLwYGB4Zcvh1y8GIg+589fOX/+MvowFVAJlXVRSEZGs2fPAb17D7G37x8REV1BibKFC1d/+5ZRUZm2nrh37yGf3Wsw5QUFhdjWPGrUdGrA+r1rNepFW9djBh06dMMrNTe3b+zVAQAAAAAAAP/7NBE9mJPzQ1QUG61EEhOf1bU5fVyLzN1gZTUIlaxfv6Muv//VcFsqyInggUc4VsI5c1bVNDEk8XC41NokIRpXOCEhBTXZvfvQhg07GrKZ/JGTa43kJ77Ozc3PyKhi9vrw4W0D+1+4cBW13srs8x4e5HKKiooPHz6Zk0OmYLh5Mxo/wok2GoX379+fO3dh2TJ3V9dp9vZOxsY9tbWNFRV15OS05OVbq6joUS+r3vKfdEJeu9a7rrN68eItYyK8e5efMRQAAAAAAAAAqtNE9OCTJ4n4N7OQkCoTGFNASkv/ysm1oH51S6H/kLQsKSml4ouKC2wclFFT0+/b15nqRLguB83w0T8iKalGDRsT80RJSZ2aG6MslGkRKkmpRTxJwt19K6qfnp5x8WIgO5zmhw8f67Qh5eXlN25EjBs31cjIbPz4SW/eVGkuI6OF5GcFZbBTUdFSUNDNy8untrG0Tx8nLNOCg8PpoT8dP35q375DO3bs37hxu76+uaJiM03NDg4OQ5YuXXXhwmU/v0v37sWiDWf6p84nspU4MW7cHK4ZLlu2iaoj/fz5qzotrTovX75yd19vYmLBVqBczqvUR6ZhlmL8sohnz17XdYbOzlPwPPr04ZeoAgAAAAAAAACq00T0YGTkXfybWUKiFTYhCY6T00SqKWmq27p1PyqZMmUxVSK4pU9EVlYTNbx8OYgWEYJLQjI14eDB4/jMsGvX/vR85KkLJZzmnjJaqVIl6F8xSUk1nHbwz58/f//+xW39/UPQ44EDXVxcprdvb3zxon9Z2d+aBrp7N2bFinVycs3ZckhcXCE19QNTB+tBpDc/f/7WtWs3RUVtXI5EKKrcvXu/Nm0MpaRa4rfQo8cAN7dV0dExt2/fv3Ah0NFxd
tAKeAxVbEcqswKVZ4Lw2pxzKJFRwHyW2Z6CtwjpVst/mkTcMyv7ZlKPJxm8+FmeoULilezLdbR0STBO77kSH/naskGtJRP7MUen0dH6fLu2C8Jq0ntcbo7yoZ77EpWkYLZt/GSwFezObpYKeWSXq8b+sjiVYumOkWssZDuiGNj5jx663l5uzrqORy1+/Xlas9wqd4IapqesArO0TYoUr3qelxjk3GSQovar4d5eCrAxr+nVvn2wCoud4NRP1Vv4fzltlIx82V58UZ6xC8D/abRUcV/lOqCEeCyy0kAWek4YF2WMRNjHEaRZc44ejSVReKGDmbx5dxj6ywMipzrBfsAQUcAlgTooA73asWFZ+ac2mMM/mL4Y8bY9QE+by+q/X4T43+c+62zV+5iJemPbIPB79t7+kweS3J7YVP0JHHwykbPZptywgPjcqcDjYHTS9HTayWId+Dmm22QveuJNseelyQge4D36mw18QpXoe9qjYzoAV3KqBHzvXSzPfM8wXiXM74k9oi9dEM1X7zgjrhmPoAlNJbFnwKUyAKb1RX9c6TRuklPJGH99BEFusqpkLHA3RDDgq+kzPApln41J+dgqbqcDvQMypmXqDuE+aJsH8NHRMCHcWsmjbKWLvC4F/eqs/WGeB1S2HfZCPOHcLXLQdgKTxevZc6Pna+dN6jDHwoWM+9YSNAYm+oU8ttesh23f/BtiWmrWoIhfG1CLmmMSnunC49yudAX37B6Rkyba8/J8L/gwkr0CRqdSNA2ZzUvrNA6ok91NSi6jOMPgE/vbnhQNi5j5yafqhRzuemYJLdUM7tvXQli+aEQlfXhbk9nC4rxzmWPKjYTL9r+uvu1uoA8ZU+ytUnRVZyh3fXilXvM+kdRV9YZ66E8ocOg+L4ak+/2jyq9+T2NTen8bSqMBYHeomh6wdtIC2F9vS1ziS3oAhSwNrshkVUkbxoywr1S9AQSzgUey0Jd+NG+sl4YZzAX/Fu/FgvewI2UusNI+3IdHCdYn3l1q6AiIQjrOJSLt1toDIq8fj8TXqYCbXQSjLy+s3efVET3u7xzIfMXjKlH2UtZbwpma/lZ3cJv4BphQBnUl8vOSbWGVtm15mrm2npDmX3/YBUomsSVueJRQV55nRkpdGbXKXEFpKejE3BmANbteFBARjyM1hlvcZEDmI+dobnSyTnQu/w372vwl/6EVpRM5ajcj7wCU2TQU1YMWpRbF3ajt1ep3A0rVPtQp8eAe/2s9IPAvxlDNj8neg75bdkoT808rnAte/9arFpQjulZVY1E5lYyL6NLbVsuZhZeaFxXm7Y4EjA1RwPvt+Z+ypQzXBJXTu3TfclMAOwcjcVb9us7E+YvXr0QwEVM8QXL5pCvXw/y/ntJWpxUUwM3L3CylPZVqDgApSo5SPDiCiuQ2AXI3WyF16QWbxCZV7C6/hKskHBrwHZs66H1lJqqfgP3bQvxakbMUlT2awuzBZoPBG2tTbUYCS0JxEx7m3X+2JeQxk5OT7d8Q0N06YbqWWA45bDfhS2nz7BAFSh+n6Gy8DbfifK1aXXlyavqhMfaNDbFr+kEQ90Lxvao+EuqgUBOe6HzzngBHWzURTEzqvzIN+IldErLe+5YVs26estGcuQTzOQPVEIdco8lpDv0FTJWYp4RVGi6JvPJE5taOLYbLerkbOEItekvPKXUycrA8se7rYeczorhSikdN/tWLZXcttCmPlNXAQiFWaY3Ac/n6IiDmngvJQ6GjDA5/9hHojioiYpFqHlnBTTiydcS0yZsr7pU5cjpKJrjiJpEGCXvigagBdtjf3SMb/UOFmPlTpWZPF9kN3HHtSsnC+X80ilhkSo5Sflwk9QE0pWJ1kZRIPszrOlRNONnVCV9LwX42ICYpAPaf26xk6ksVLYIdFDTKfpMQO8fWLKtoza4LIchHwrd6yi2vOt1lGA/YamHbpMLsWqCHSxQmnT/qsIqFRTaQ/ZJlK6Zz7GfDs9A1Nxfjf8O4ZYT9qE1syWUR4y6XehvSFiF6W8pF5t/PUFi7ZuhNR52EfZhIpyvG/wlGQwb33K5KSuvZavS2k/GIBoGpJdyY+6fN43rSWiUVjsiVfXSrg67ewirzQNLfryb7LdYozA99nLo+r4I3oTihkrFWmAvZMDFwKWlAINfFPbHca5t/OkRm8CpmA7XeWjIqZxkLg5b/e6yvapY0Or5JvcT5wZf/IqB97MDaksL3KYzHJrSW98TjqkahQQd8WPeBHJwkBTWizoNhgRyG/guTSnxyImFl0TXoPvV+wwVvEBn8ILCfjajYgnZrF4ccSozEqdjtCiefppnZ4JJBx6vI2RHd10hqMRR0lUUKrmanmjOnpA2YGy139JYVWPFRWYyl+ubTq4eeR9HsmlW47nAiRi6idKrIqUzytEfMnDLlCGbFDdyosSZLAvwBjSzGnYjf9BtqLoyOI97kd+r/DrBZ2nT5mMyO/c+PNPy7Pz49NCU1hYoz/m78GJfoyfaKoaF0YTJk6H0w4jzSDvpttHCrujiksazXpqnyd+kP/U+q0yfi31IXOMJdkOXILFN4KYYnT4+OEQMBaFpkOA/J315ev2zy2neZndH04gN/rfgKH6fZxPu932M/tj3+JKvPZTZN740Fxyc2sjLUVvc3Xf+fuc3Qr1oPNPqwkfyfMQbtftjnmo96BOFwjzm7BFnBj/BIY1+b8fpCgoEXjCZHk90ONWRRqAXnc6eNfZ8zoQ6Q36RaVhsm+CNGYqG4irt1Kh1czzvcy2sXSr1Qh/uBPTOukv+oV3H40Rcn6Mi1K01sRnuVUj/XihPxt9kW2GrSNhxjQIz2ZoKrtocDCEyZTRJuYO/YaRWrCgjGMz/XBf+k3TICfgV+XWSxsyQoLGD5QQ50v0A//294qSrxKCIJ/TrO/3JNihKoD+CBp+pTniR8ICtSbjmQQ4wLY+wyycg0p4+iTbaMebx9Z4IHbeOVMl8cW9BJO4JLe2zIs3/ZwVROH945XtMUCioZC7FqpePsLEueJPiyCI7xD+y3Db4FXYOjQGVRCZQOFM2dCac6A3IvI1dqjD+q33JUJm2UbJcIPpj1jl/tdxlullMJ5RPg6TSTzNMddE5NtkayFRUUfYfZJq76tJPkXjR15Mfkmrv9W/SgvxUoTjOSSnuzPBnQQyuz1k/OKglH2nHhfD0n3tPRSpcCKvwz6SHlxpHBo1l0keLLxco0XHEf8hmcivJvNmntSlKtN278NYmmnO7GZTh575m6xvpAdIEO3RoCY5TcvNMmUbkRDvt6QtTxaghrRHmlsYfbLyKH2X5WxlH2PVoOg0mvvDgWK88xkWBbkv54Ltu5n+uLRi33ZP6QYjv9Pm+7pjxeC+51pgv8WznjB/V75jlNMGemh8g/zAUzS/9BFYHDjGag/NifDM1hm9y/7CeCj8cK9uwDtft9TJ8kpFDh9FecWKCwFsH6wWMTTpMJ6kyKp38+2il2skGOaBuRWSjvkNw9DLQN468t3XALF3LuwXd2v6UDSLmI4uRDRc7oQjCr3NWtoouxR2NP7mQPneg2s8YmszHXgKXk
qifnG+ku3MWErXeJ4Gm20H9JIcrK6tAnP2iMAuFbUzTCXmAR0pQnjUzi+5dTd5FCSJ19owEmlIehHkkph0b8VMaqUBwAABlc3i0+aHDUb9pRPE3UGuNLbGRdxO9bAHn37SWczKDk8mxLTYDCbnIITME90cNucFE2+icuQs67vLs6wrr6219O6Mx82ysAnLBn4emKDmw9aUfSxszjMpmKd2urfBzcfnO/gKzYqUcSforYA8tlmrYDyuqDhIG6FmkVMVkuZnYWbKFYoUZoGeDo4qtxPJS937B4tYl7vv1529/bH31Kg1TeyY687+Dp3ZJJbMrbyeASO2x8f50YlYRpjytOOYHyVgPcmhjj/VRPyZH2c/m4Rvi0mHBmAcVTUKTokZjIQCNxYl2WPv8qKEgtD8xfd4kgsfqBwXkmxCHuv0NYyGWdIcHikTTi5Ya7lh6jHCH3xI+mEFYHPNbFaf4nV0TtD3OueMbgui+VDvM7m53tTzJXF8NRbmYnuF3al8WKe1DobrUb66puhUWr56UiDsXD4w6bK4nZPm7q3FFeYtR+Ll+KXCwJpRjH6heWTKuBb4vWcupL34gXW3hnRV16Y+jbpzjk1FKa/cjHCzfiP189FPvb59uaJa3CIxU893BfLSMahNkA4lGnCAIpWcwe4w07F4U2xQOAEJnRgM7CZNE7fHU7x/TES+0NVd4eCC8RdxRs6uDqMJPe5vHweTD6OrqoYK4iOB9we79+EuraXDOYqxL5tpaTOJ8OV0MtXVg0UAsZXK7xpS5/apeOlF29L2v5bHkpqYupq5STHw57CV6WXXIF8qow4t2yQPO9wpTxUSlLieO+9ydpVys/rvNeJI0YvFK8YYICgxdBuxizie3+ecuJ6ocklEkzkXO5BdUfEawxyRZZaRo1wUnHkgj1/Czr2RUkVNqsoZkMDmUWYBizm2AVBR184FovSgsQx+sPLHnlfQtY1NQI9cEq/t8p0gFMwJn4KNUoitHDqKj/2qNLQak1KfdVLDzyiqfJ6KH5dyoopaUaY+0M2zHz0u2uKwp5SKRrG9VE2j7tDfMBJkQi2uhV8ngIflCLI40V/5PHqeVFOPdv6lZ+WzIVhfeGja0KVeqvt4oNmfWag5JBW9uATJlNOruwI3a7JlrbKFVUf2MfQ8fUHAYm9hIKQPuniuU6L+Zca5hLSGYtTJpAU5J8M8mUcaNtvuEyoec3SzSlPDm62hGNxJAtxDtrR5l3OZyS/jqCLvhpDNlyplhCDpLM7c3cLgaVev/uMSKs5q82lEfed4tMUkrMvBrqeJLxcZu9jcATP7WRdwVro/ZVi8j/fd717ZxZqi7gCFxUL9IRDquQd4WZqmMHAUv3EX4qcPJHV+TGlcH+G/e00DRC0je+OaFC00EkuPUOZ7M44i8FhiuGcODOPKspwZ89Xg5dd6Bs5iiVNUX2alipy5MPnjpDhJ4jqlSR+HnXxvoBf3p9W5En0zOid3VklY+XONIFZF1gTGys6P99NM8NF0PlX8hiXAL8TAZuEXXnaeR/g3pkoK+DsW5uxrf/7kKphdr2CqU5VKsYJopKfkzlQzl2RZSoHH1R1Xmrzxq/tgAYNXARCx8942QqGTV4tEOT/VoNosVKcMCQ/k9GxjMoT9u9YwIUKnQiIN/fYJWUenyUvMvxhhjwNHABHSF1MCbStn3b6LLpgbs1iwOb4TVRh9DLzMdeScftwFfNl8Jlj869G2V3LxfTslY5hVjjv8nmpwnL2/UpZ3Wss1d4I1jT+alpKlk29QSA+h/5yd8S3DMpIWO3ffOJ9gM/pWNBiXLAJ5zdDjOqfH42RmGohAV8pheroswma4sIUW7mwkJM14ATiQNyhiOPnUj7jg7lcbsTqGiQw4zCmDi9sR/ABtSRpJp21/EX0Qpu+/NPLxu8eOmziYpYmsf50OF8svNKRlM2BaKAPrAZc8DcAeNiiGGvzC89R/+yIEx6u0UVUVCYKNOPg2JaeEdMz7pdlWzOMryQN8ZcuW/jgZQ3rF/F6WLqcLzjNK0CvUIx7kwMdxqbbP3gRVj6fQbvfQjm1kFvn6CVEb0qjMH8Or54FXVwmBC2TtZQ8vNaM+XgQZVXwpiyO3RW8ockVZJ6yd8KU2hXJNy40G6plg1e+M8NjAj77GZtSVfkvew6/ZQGaZRY8YL3WexbHvCvwFXH4kpQn1+qS2X4sg2tPMCinG8+UAyUnDvcDWC2T9JdG6Ft3/pRvKghZ7K8yut9IZfwmMBwhCC2k/5OpjtpS6hyTDiNmeqqTBXvhdsb+ZlOd066UhcXdeEuH8NT7cZ+XBX6qu44SugDnlQlkY8vpYPlf6k0C2d+h73veqKz/x+nhE1uIK5o+0veGBNfRItyhSN2BWlqZawgrn2PP0CSvJuHyonBEqkSeHrP6yLQTbsp6NSxKj1J1DMjuSvu+Iic6ByM6oUua6wdii5cOhFuqvudh7P+mpSheTuerJV9xThHLQhOG7VdpW7mTIfg7J94W3EA20ZPS1Xrd+3KpmUaauWkhVgJL461/X8WO90jJhbosozQLjxtMIqIxdHSY3j7WlhD+u8WKjJQNH56WQTroE+oRPRLLQkpM+FQEgNYupTjnuGTCEHBw9N06Cq5M0P0ffYCXLWb3xVeEOFyP+ijVi8oaYcSTuA1U6Zt+D2Uo/sRnCYz+ROLpdNJwdqhuKEANM8sioFsdHxVhOh68uumGo/LqyKH3eZv2+DJYJrOQzIuPCiQtuxkk911PKwHNLE6mfxx3TZGRd1RbSP3u123BJZm6DFAS3+TXPAjc9xj0XheT8tfhHwLAe92PRJ6j2nPLG35bN/OuhbvWeSQI8XZsvH2IOrpRXcgmXTIVNj2qCUzChyV+Ro7H3oLMhchLXnFYY87ER28QDjjlmuTJBgt5fxBdpApv6X/hSU1gkX2S3vZgRGdqZpCbHUyg+8ED8rGRpA7UIMR6pTUFY7UQJVTPqGkyeqqMRs2CjvWxDfzGcWZnSPiodDsJiedRByxZzslCYip3rj7v6Kf9Rgy2jKcUJvbIlcVbwOMfk3Yk6Mx3lunCoZYpbcGNVV5E53Csj8pAE75TCX8DYMKOv3224kCzYyfSqrbOBJ0G1yfLf1r58WavUbR1xmqnpZ1R48q76IJcpxwZlLcejVDe76D5/C5aF8Bu9gwEkQaV7adiSdvdHE3RUANQxemU4VU8AjxcoqFoWa4wAZXw+mBh03xJPpC6ZnTsaZa0jDzcBSqPcQZ9en5UEt3DrDvCsQGHwTvVxfwlxrlbRUMmrWSbM8oCvdU6J6I1N6YU8PFga22DMwur8Vu9KZXXM62I5QwTxTxEGtAH1xrgxGUYVozGsM5ifLmDGE5XtaYnMBOOznZP24Y/r9e4H6GnlwAosT1/P0lfQbFBTi4PqkQ1NeLcfOSi7Mxd6VXO6lPk47sM61p3Wnn9CvfULDpX19gdRKwWhR40u9dRLv5XuavRs9r7Lnlvpzt/FmqHPBMXLYdpN2j0mVmwiBNfpXq/QkoRnBzKH4/QcohPuXJzCwix6s4P4ZeAkuymGpD6u0TlZwbdFF
ASIFhAFWx2ph9P52F3Fua6jCC08tBGkvfL60jGZQcnpGOM+VYB5qNfTh2kIq8sz9qvlPdKFPS3vtpa1ibBVbm37SWWKOwWFS14BMwftpoUY+civZVmtYwZ4Oxv2V7Ney8nXJnO+1rM+9YSMHManfpJOfHUMIAsJCdwbCrrZXJ3Z75tXgnh39+cPV3S/3plHL5bMkZsXMGG9PLS0m79pc7uhDSRDOVmR19sEgAU1or0iiLggZy0YYhhl3ABKjMadarGgW/qOjU4Dl9LxgG08zjtHaEOmin9gx5P3KN7PzYcmQr9g7zLbmUrxNQzYo4yCSVYiUr6dazYnxzw8TJYC+lGdMyMNgg5/RMyJBlz2aBQ1FIV8bPKCMlRgxvDmR08iO6ylZWHmVOqDkM3Gfr4Yov23Np+Nm+atO3FAu/sKT3oYe550FwzXRBSTNg/TNzzb1VQR2STao+vBs9LgJNlNZ9AcC5HY7eNqwPk6mYJBBDHDRvlP0cP8tPFwgfzzlvI2feIWCVyFu/oA48TZibZ1ks765p+vPheu3y2aRkJdbkdfVoyTL9WImXMWsr/LVniRlYlvf35pnohtbfIWG2z8WA+C6elHTnxMmi3hV8/L6mEvemhpUsSXPl5i3hU+z3njjG668Lr8vozx4iXcsGAfpobIkqP55U3FOdtVTi4fulAYPo6soYkC0icJ71Wt5lRjeNji6IasikpLTbRPBz1aJUTFjrjoSbgIGLBXCuWd2n1fH2Ll2l1j3E5Cv9+osiqSjO+4PYASOURwV1SApGcCOMjm1BJiFdl0cyhV77SiRPvlEhR+zdQckn02HReXUp/iIPduui9NGaIloan7lkytzvGa8tNyxl6USiJ8gTC5fVU/PcbcXVVK1QD64lweSmpWJo2+DYhfKMefnHxtVsBAWPLLi1x28WWVtB5SBSNkcV2/4MN9Z4Cu47cC9+pPXcjlDJ+onXJNd6yBYzBEeefrTNZThgE0qsE95LvLLxlV8Nc6JLyjAmmVZtUYP5cbESRx5GegyoxvGw8/cNDerSkwLXYnOdq+LdP/Rc8Ly+nrfFlfg+Ii91Ld44WiYJ+8aoXdthS7zNvsTBa8lAWhHe0pvzVpzU/l4dnnGtxw6+AX5xVp5gY4DLfN303zkm4QPLLYXjaMqkr+wCnf6jlyOc5E0oeYCymyw9QVAvlkaf22+fYXf9GLCPfGFjmXBRE7Kc06i8ixRewgEN342m4nK21a1kxeGgeSZIDZmEo4d/aVi+Wcxw81k9ZwIWTfvYUXGzwuBY+PzIIiQjUj9nv5u1ACP05WCG4Evtslin/dgqSJD6E4YBLmnm8QWDWlNkVHfFIHaBOKMszPA0LB8XIMrJ+Za3GUGXXphD43BaMp37VlfMft2Unvg8/BaLBOtU//SkXxk3jw3tLlC8JjEGmsGCzxuZ0UHbWXMhcCLZJELi6X+ZM/YwhNa+FdNNjgVIGdImSy2MH/Fe3q4jZfB1p+dxRUjvl+y2CeDxBOFuL95JsxfTsaa2MRSErIOi95dRz6Qs74c5bQLWJBgzQNjT/Kx7h8YfFuggneRzmyclTVIZ/mD1Xp/pUs3/prVD40Iwh++Ay7qiTRzrJfd2Fzs+rB/n95Af6/wP8TAhb2IDMI1MnBDGIH8IaAXKFOkN9/GQcA/hdQODAkCmVuZHN0cmVhbQplbmRvYmoKMzAgMCBvYmoKICAgMTEyNTcKZW5kb2JqCjMxIDAgb2JqCjw8IC9MZW5ndGggMzIgMCBSCiAgIC9GaWx0ZXIgL0ZsYXRlRGVjb2RlCj4+CnN0cmVhbQp4nF2STW+DMAyG7/kVPnaHim9YJYQ0dRcO+9C6/QCaOB3SCFGgB/797LjqpB3ATxy/r8FJcuyfezeukLyHWZ9wBTs6E3CZr0EjnPEyOpXlYEa93lbxrafBq4TEp21ZceqdnVXbQvJBm8saNtg9mfmMDwoAkrdgMIzuAruv40lSp6v3PzihWyFVXQcGLdm9DP51mBCSKN73hvbHdduT7K/ic/MIeVxn8kl6Nrj4QWMY3AVVm6YdtNZ2Cp35t5fnIjlb/T0E1RY5laYpBeJMOCPO08gUVFvWkSmotq4iUyAWbc3aRriJLDVNrBHPmj2bg+QP3Ev8C/YvHoUfuZf4lOxTW9Fa5lK4JK4kX3G+knzF+VL6lty3KiRfMEuvinvVjfg0zPJfFHhQt4nwyPhs72ehryHQMcQLEOfPkx8d3u+Inz2r4vMLQl2gXQplbmRzdHJlYW0KZW5kb2JqCjMyIDAgb2JqCiAgIDMyOQplbmRvYmoKMzMgMCBvYmoKPDwgL1R5cGUgL0ZvbnREZXNjcmlwdG9yCiAgIC9Gb250TmFtZSAvSU9YTlBQK05pbWJ1c1NhbkwtUmVndQogICAvRmxhZ3MgNAogICAvRm9udEJCb3ggWyAtMTc0IC0yODUgMTAyMiA5NTMgXQogICAvSXRhbGljQW5nbGUgMAogICAvQXNjZW50IDk1MwogICAvRGVzY2VudCAtMjg1CiAgIC9DYXBIZWlnaHQgOTUzCiAgIC9TdGVtViA4MAogICAvU3RlbUggODAKICAgL0ZvbnRGaWxlIDI5IDAgUgo+PgplbmRvYmoKNSAwIG9iago8PCAvVHlwZSAvRm9udAogICAvU3VidHlwZSAvVHlwZTEKICAgL0Jhc2VGb250IC9JT1hOUFArTmltYnVzU2FuTC1SZWd1CiAgIC9GaXJzdENoYXIgMzIKICAgL0xhc3RDaGFyIDEyMQogICAvRm9udERlc2NyaXB0b3IgMzMgMCBSCiAgIC9FbmNvZGluZyAvV2luQW5zaUVuY29kaW5nCiAgIC9XaWR0aHMgWyAyNzggMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgNTU2IDU1NiA1NTYgMCAwIDAgMCAwIDU1NiAwIDAgMCAwIDAgMCAwIDAgMCA2NjcgMCAwIDY2NyA2MTEgMCAwIDAgMCAwIDAgMCAwIDAgNjY3IDAgMCA2NjcgNjExIDAgMCAwIDAgMCAwIDAgMCAwIDAgNTU2IDAgNTU2IDU1NiAwIDU1NiA1NTYgMjc4IDU1NiAwIDAgMCAwIDAgMCAwIDU1NiAwIDAgMzMzIDAgMCA1NTYgMCAwIDAgNTAwIF0KICAgIC9Ub1VuaWNvZGUgMzEgMCBSCj4+CmVuZG9iagozNCAwIG9iago8PCAvTGVuZ3RoIDM1IDAgUgogICAvRmlsdGVyIC9GbGF0ZURlY29kZQogICAvTGVuZ3RoMSAxNDc5CiAgIC9MZW5ndGgyIDI2NjgKICAgL0xlbmd0aDMgNTQ1Cj4+CnN0cmVhbQp4nO1YZ1QTa7tVOgnSQemDdBCS0AlY6IiiSK8eQzKEQEhCEmoMXRAEQfBgARULqKiogIAICAaUJl1RQJoUATmISG9f0HMsn+fXvf/uupO1MrP3fmbP+zyz3/yIgqytvboxhugFWhAJVHWEBhwJHML5ewVS7FGEg+omRDwGYJK6EAUFBxwVD/6mQhScQDIFRyQg/6kz
JYMoKpMwQ1GZ5Q4+gYBxIBYANAGENlJLF6mpD2jC4Xr/FBLJSMCbTPSCKJgSSaFkHNaHigS+XwLKjnbOKmpqu34wCAMDA8Ar9B8FMAMpOCwBUPzFgMkGgXgiyR8kUA0BUyaNx+PQABYfSvKhACgMBsRsejih8KAfYIHD40gkYhCgbKryqwtzoQh15pcORAGwBAkgmdnS1xs3h2VBJGPBr73A9TS1AGUfKpWEhMG8mZL3pqRB8dYggFQY01PBnIAxJfpvroYCgWzebIYjg2hm86Gw/562H4EYTKD9RnvjCJhNawATSII5EnABgeB+s2/FAIlIArxReAoI0Jgq5IeMBamADlwTbgDXBsAAAAxB+8A2n+4QSgK/iohNGkXA0GnfPeg4b5B5gtAoqCAQoJIDQTrtZ+FXBEEgAAwOTQW8QCyOAPnhzqRB72/YBkUl40IAd7gGc5wAfPPz/cqTmQEMkYAP/VF+COUPAjBTFI5M/JZJdfgP0cSEGALQ1BF6WoC6FlwPQMAN9AADPQOA/osR8G8HBGaLwv29vJ8s9xO8iUyfb20w5/d3KwAs6FuwAeXNYKsAvz4AdohIxaFBZma+J9MDrgNnhpJ5QvxrYn/Rv+f2fxbVTS/TTacfGf1thRaBePzXYSp/CxPATBMFOAhs5un3YpQ/Dh/6L+W/VTqD33bmv9vY/zReALafimI2Y0zA4n9wOIoFLgTE2OKoaJ+/Y/tNcCRgQDIeRwBtiRTc5i8IoI7Q0flv0cEHh/YjgBQKoGvwVQMJmF9XATMnoIkYHAELaOroAigyGRUKgTMDqck0oyEA5kYCQ77uBQCmQSBSmbcApEAqHWDuWsjm+9fSBGAUEor5cpn0V0ZPD4DZfEfMxMFQ3xECDgdgmJ8gAoCBP0EtAIb9CWoDMJ+foA5zHj8gM4Uwwlf4S0PoQDKZmY6vAWV2+w/2xjGnCoIhIBqyFDO95dzHUcnZmxmpXYYrB7YoPZZpIBPmZk1F3O2pvAGGuz/xPDGruXu/rLXv/OGLprOTMGHpkgO+7pIzqWNeWVjIVmOhlfuzQtE95Bxsi8KduqCj2NqNysLrf2XHdeQFne9VqhGpowtu+3LmzqtLufD42j3qbdxHXpXZNjRinwRHv290MxpZnuJrWbrXg3E1Ha3wOj1YMRtfZDfz1HIXR8bIc27p2Qznm/5dhmHQDahTpI6BcqBtyUpnrwv39bX7MAFBpC51lsXazQjXfPz2wx0abRWfH9TQ09YL1W3uEULvbnl66FNVtN9tNzk+/OsJfdqJoUXF7YOlqYeFMqTerhOrKpcyu4SrZVTulD02mTpFZRMqz06Zht3y3xKXgXN+UMDmfANQ1G28N+hbUPQCGWux0fn2/dJ5Ha3OONnyfr4jt+SNBUu03dSD2SOjSay7689+jIo5Xn9iVreTVloK4UDMsUhuhI+ks7SLw4TF+hfGrE9nJKdxQmUDlkse+VGq+UczP7Q8GvsLbvLcZtVp6qi+l/WYsXoS0DNWQ2p2FcAc6Ov0sI+qg6xaxrzhHp/rlkIEvBblNswWP51+BR3UrOKWRJuqmmAXzlbBNsW9/gQdh2GbEnpKXHnqHEeC9K+ZcVT7sj57LlXI1Wi5vfvyaoqTv4SV5If+OP28OoJu/nFGVyT1NPTjCztPF1WHqNBxUdrFkfWYvrMvPS+If+a2OI5YV0mzVR7i6isbloSWd7R3rz2b5k/R3j6b0FKkUfGy/6DGcKStzzv+sYXHeu468u8cy+/ONPv6Mi73LU/LhrBXLwj4ylWdi8fZFG1M1YemWcopt5tK+ux3iZd6Rl+k6b160P4IYVkdZL7zo93QrqHgfrsXNa4PA9ZHzGfQ/LyR02VhhdJa7+x1SowJRxliR4YaNeos25c8JjzdV2vpdV9k0133N6RqPCwWLH10zVktMvx8pgQ/5cTu6oAvbYyFPhafxOyoN5R7V9VPaCsEnNhXwOpvhZmTOp0YtXZHLUlhfEJ47GTylIEynYszRBoHSGC0XFn0OXnMEBHW+SLK2+LN13V0J8yVZM4t5dRHXOTr7suWw3IVhU+KDoX30nJ5h8bviq8o7SU3bQ/LCO46GeCWMIF0WUhkU+3qMfwj62D/IwdeY7Y9bG1jr5JuGQa97PqszDfTdUcZ7b73s4YRaPYhJNhZ2QB3i4A8sK4qt23Hsj+LhzbVfE+XiNTKMZnqZjXC2qW+Vvd3T3O21ZywjYWRRQ5/2l5S6obcMROH64B9Chds9X8WD7WOs7oQa7uqJPKOXirHOXTtZuLd1IZpydU2rAI7+jBFvIvnJXGlfzSkfeTosKZT3Lm1tNv8EusjPcatg8kwduXRh8cT2LZD2fyU5i5/3lWe1GxoJ+ZTSghEq95wj2hCVT7tlj/lIRIdPn/Z2PGFtPDjIrmKd196Yjx4uDJutI/mscxO60c0M2pY13VchhH1bkB5sl64XhMExq8xuZQ+D+QZd6ixVsYMnhnIe5VJp2a/SeQdwOlKdsrksvZwzTc1L4p3OUcUZK2sftFTE0QWUkGTrffuS8hH7nF15Vo5pqC58sepEZFVE9+stXk48WYxX8mVKKE3BpMFpHYBUyPuNxd75vfuHG7565S/UxNpqDGhsDL5ZU7zgzGT+JXzfpLS4cczvV3E0K+768Zc4uTHfS9bDfa3RewHj+QfT9d+H6xNDwsLKOmbap0clnvda2LEOz7QkO+Dfa88s3jIFvnyKnJZK7LalGFDSXQgSlaU8wrpuPsPHY1KcybfXG3tN866uhp7sWdRb5SHV1RuI1POMOd9Lcwpv7iSFVeiTHQGjg7Asi8eCeTZbb/PILogwPY9yz1IAWtdNif7EddMXp+MMuPBBz7IjT8CXlirebbv0HPIh161RXxYTHcXNc5dbVtZmZ7QT5d9HHzykJLztXNq0tdUS77wHAkqHJUe3jvrbVhwYWtyvlucI88580LC2yMBqR7FRpJWrap5cyc5589UKTaezopk7dh2icCrUM1l5RW2ctyMs+hl5K4U84nafTHz9gZfGCpS5/vP3eq6F5EvVrlilyZqeaFdq89CgqpQwl/yvJUfMpEydHq982O8uHSmEcc+G7dYfGq4sNFMG2Sw8bSYr6BSY4D1yJaTvSaRItENxAEOXehZmV0ST47lOcfX9l10alGLtVJKDlrmnPLLqxcv9TpIv2Qcx/2xQua4lKr6uaNPlpOatzy98bayiPcQXfE5D/uONvSNdqXcBAb7571ne2Vp4iEvoVXFiukT8dGU5SLRjDIDllS4gO3dnbTEnhvy9H3KoS+cJV++6S6rKV+Tz3YVUW3IORCcYB9YjRMenI+uV+XdopjbWlB/1FfDIlAzIxS1x5W99BmdwKHOqPfjlomb3HclSD5xF0KiNqS4t8BZANWfkkgY6/XYbcJQFfMbOhbKq1yBHCEFOxD01j70ecZaJ/sKdpX7r7yuUR+4/jhSM1+hirvjLrCalX2tyBS04pOHYHQNo7dBV04ktN9eeFFpn/zKSSom9sBJo4W3eVtbOPfd21e7tryH9Ccg9sWfcq1GwPoCkiF1JnmOFH69UPd
W+a285IGcmsXs8eujUI1sDe9JSUaD8pTJVLdQ+HPAqKUk4tPGZ3PCbJkt1T3qbVaHjbAGgzJIYvSt/XEx0jP2cq5QzDTcblZgt85BNmxG6QXB8jYrpEDiszBKDY0xcWEjvRi0eVJ8arjuj0TVP9k66Ye43Rz56msbBucgGLQVxVqzvNLqao7J4/i/UIWPw/J6dsp1nrA18jN0QKUbxq641hwwS90dVJAhkI1scqEs5A2YRo2y2LIYHo5MjAYc7FKrIhXNDeC0EL03HmY53KEuZ8tGIDOnZpQMSp09u7MGnrUdepujQafdtivpH+XtPXZlr63Jk8mlBQ7q4MnXjHSJHHNelRloeUQg2bVfpxJfNZkv+Vgm8ChO+mbuvFlSw0RSAdsYQO+oyqoY1Nb5lNapFFfzsOrk+uHBpVVGs+TZeH83TxXy9Zvdzuk6Zdv3Dq4XT//JaTxmKDre4SkdwXjWMpgmdXl94dJlyLAqn1xTgWkKhA/mdsmuDsIhPKKU5Q+9iR0xUzNe/uDME6aa219mgwdujJ6Mg1hIEzvdBt6lqLcJczzbcTB0TUE7pM+zL2v7awfrVhcpXu+dKkMf9ROzu3i4y+f0/a8ioaBl1eycHtRxf2Bu8G1FRJ5QDePRfWvO6Arp65YeK7EGalDd+Uyo45ZgjpjGfvFlya09nucemE++fgtE1GqNTV2db3tbfaa51HRxUDTPukTVkFVWL8NhSpyf8lB/2M73tuZI1PEOo/2ss7v3XO+WTFtyPKjIFZ6nSVYStIy+mtstd23W/NiBMy7wP9Nj5LwLBURTmluHULL1UNYGbhjF9WY+WgE3Jy5hTe51sCu4aGA+gh/6SOVXwS6tpPR02VtGMu4OEsp7tq0+eKiIW0oQMpydEouoVzyPl0+s3SuUu+FOITuGEcuPdZ7nOnPhVJslr2y1DLuO2A6zul13HEaSkp7L0TpDtOyQOwSmtrOUFl8SS0VlVvmpx7QsNiX0vUta7L4y7gn/Xx6Q/zf4P2GAxoMoMpXojyL7QWhkkEIlkjf/FYBA/gMAcXliCmVuZHN0cmVhbQplbmRvYmoKMzUgMCBvYmoKICAgMzYwMQplbmRvYmoKMzYgMCBvYmoKPDwgL0xlbmd0aCAzNyAwIFIKICAgL0ZpbHRlciAvRmxhdGVEZWNvZGUKPj4Kc3RyZWFtCnicXZHNboQgEMfvPMUct4cNaNzumhCTZnvx0I/U9gEURktSkSAefPsOsNkmPcD85uMPM8Cv7XNrTQD+7hfVYYDRWO1xXTavEAacjGVFCdqocPPSrubeMU7ibl8Dzq0dFyYl8A9KrsHvcHjSy4APDAD4m9fojZ3g8HXtcqjbnPvBGW0AwZoGNI503EvvXvsZgSfxsdWUN2E/kuyv4nN3CGXyi9ySWjSurlfoezshk0I0IMexYWj1v1ydFcOovnvPZKWpUggyTD4WickQ15nryJgZiUuRmAzFLzl+iXzKfIpcZa4inzOfUy+3W2NX8fnu46rNe5o0vXEaMQ5nLN6/wS0uqtL6BfOlgkAKZW5kc3RyZWFtCmVuZG9iagozNyAwIG9iagogICAyNjUKZW5kb2JqCjM4IDAgb2JqCjw8IC9UeXBlIC9Gb250RGVzY3JpcHRvcgogICAvRm9udE5hbWUgL0pIUVFMVCtOaW1idXNTYW5MLUJvbGQKICAgL0ZsYWdzIDQKICAgL0ZvbnRCQm94IFsgLTE3MyAtMzA3IDEwOTcgOTc5IF0KICAgL0l0YWxpY0FuZ2xlIDAKICAgL0FzY2VudCA5NzkKICAgL0Rlc2NlbnQgLTMwNwogICAvQ2FwSGVpZ2h0IDk3OQogICAvU3RlbVYgODAKICAgL1N0ZW1IIDgwCiAgIC9Gb250RmlsZSAzNCAwIFIKPj4KZW5kb2JqCjYgMCBvYmoKPDwgL1R5cGUgL0ZvbnQKICAgL1N1YnR5cGUgL1R5cGUxCiAgIC9CYXNlRm9udCAvSkhRUUxUK05pbWJ1c1NhbkwtQm9sZAogICAvRmlyc3RDaGFyIDMyCiAgIC9MYXN0Q2hhciAxMTAKICAgL0ZvbnREZXNjcmlwdG9yIDM4IDAgUgogICAvRW5jb2RpbmcgL1dpbkFuc2lFbmNvZGluZwogICAvV2lkdGhzIFsgMjc4IDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCA4MzMgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCA1NTYgMCAwIDYxMSA1NTYgMCA2MTEgNjExIDI3OCAwIDAgMCAwIDYxMSBdCiAgICAvVG9Vbmljb2RlIDM2IDAgUgo+PgplbmRvYmoKMSAwIG9iago8PCAvVHlwZSAvUGFnZXMKICAgL0tpZHMgWyAxMCAwIFIgXQogICAvQ291bnQgMQo+PgplbmRvYmoKMzkgMCBvYmoKPDwgL0NyZWF0b3IgKGNhaXJvIDEuMTQuOCAoaHR0cDovL2NhaXJvZ3JhcGhpY3Mub3JnKSkKICAgL1Byb2R1Y2VyIChjYWlybyAxLjE0LjggKGh0dHA6Ly9jYWlyb2dyYXBoaWNzLm9yZykpCj4+CmVuZG9iago0MCAwIG9iago8PCAvVHlwZSAvQ2F0YWxvZwogICAvUGFnZXMgMSAwIFIKPj4KZW5kb2JqCnhyZWYKMCA0MQowMDAwMDAwMDAwIDY1NTM1IGYgCjAwMDAwODk4MDUgMDAwMDAgbiAKMDAwMDAwMDU4NSAwMDAwMCBuIAowMDAwMDAwMDE1IDAwMDAwIG4gCjAwMDAwMDA1NjMgMDAwMDAgbiAKMDAwMDA4NDYwOSAwMDAwMCBuIAowMDAwMDg5NDEzIDAwMDAwIG4gCjAwMDAwMDA5NzYgMDAwMDAgbiAKMDAwMDAwMTI2NSAwMDAwMCBuIAowMDAwMDAxNTUyIDAwMDAwIG4gCjAwMDAwMDA3NjEgMDAwMDAgbiAKMDAwMDAwMTgzNiAwMDAwMCBuIAowMDAwMDAxMjQzIDAwMDAwIG4gCjAwMDAwMDIyMTAgMDAwMDAgbiAKMDAwMDAwMTUzMCAwMDAwMCBuIAowMDAwMDAyNTg0IDAwMDAwIG4gCjAwMDAwMDE4MTQgMDAwMDAgbiAKMDAwMDAwMjEwNyAwMDAwMCBuIAowMDAwMDAyMDg1IDAwMDAwIG4gCjAwMDAwMDI5NDUgMDAwMDAgbiAKMDAwMDAwMjQ4MSAwMDAwMCBuIAowMDAwMDAyNDU5IDAwMDAwIG4gCjAwMDAwNzA4NTcgMDAwMDAgbiAKMDAwMDAwMjg0MiAwMDAwMCBuIAowMDAwMDAyODIwIDAwMDAwIG4gCjAwMDAwNzIwNzIgMDAwMDAgbiAKMDAwMDA3MDgzMiAwMDAwMCBuIAowMDAwMDcyMDQ5IDAwMDAwIG4gCjAwMDAwNzI1MDAgMDAwMDAgbi
AKMDAwMDA3MjUyMyAwMDAwMCBuIAowMDAwMDgzOTEwIDAwMDAwIG4gCjAwMDAwODM5MzUgMDAwMDAgbiAKMDAwMDA4NDM0MyAwMDAwMCBuIAowMDAwMDg0MzY2IDAwMDAwIG4gCjAwMDAwODUwNDkgMDAwMDAgbiAKMDAwMDA4ODc3OSAwMDAwMCBuIAowMDAwMDg4ODAzIDAwMDAwIG4gCjAwMDAwODkxNDcgMDAwMDAgbiAKMDAwMDA4OTE3MCAwMDAwMCBuIAowMDAwMDg5ODcxIDAwMDAwIG4gCjAwMDAwODk5OTkgMDAwMDAgbiAKdHJhaWxlcgo8PCAvU2l6ZSA0MQogICAvUm9vdCA0MCAwIFIKICAgL0luZm8gMzkgMCBSCj4+CnN0YXJ0eHJlZgo5MDA1MgolJUVPRgo0MSAwIG9iago8PCAvVHlwZSAvT3V0bGluZXMgL0NvdW50IDEgL0ZpcnN0IDQyIDAgUiAvTGFzdCA0MiAwIFIKPj4KZW5kb2JqCjQyIDAgb2JqCjw8IC9UaXRsZSAo/v8ATQBhAGkAbgAgAGgAZQBhAGQAaQBuAGcpCi9BIDw8IC9UeXBlIC9BY3Rpb24gL1MgL0dvVG8gL0QgWzEwIDAgUiAvWFlaIDQyLjUxOTY4NSA1MTIuOTI0MDUxIDBdID4+Ci9QYXJlbnQgNDEgMCBSCj4+CmVuZG9iago0MCAwIG9iago8PCAvVHlwZSAvQ2F0YWxvZwogICAvUGFnZXMgMSAwIFIKIC9PdXRsaW5lcyA0MSAwIFIgL1BhZ2VNb2RlIC9Vc2VPdXRsaW5lcwo+PgplbmRvYmoKMzkgMCBvYmoKPDwgL1Byb2R1Y2VyICj+/wBXAGUAYQBzAHkAUAByAGkAbgB0ACAAMAAuADQAMAAgAFwoAGgAdAB0AHAAOgAvAC8AdwBlAGEAcwB5AHAAcgBpAG4AdAAuAG8AcgBnAC8AXCkpCi9UaXRsZSAo/v8ACgAgACAAIAAgACAAIABQAHIAZQB2AGkAZQB3ACAgEwAgAEcATwBWAC4AVQBLACAATgBvAHQAaQBmAHkACgAgACAAIAAgKS9BdXRob3IgKP7/KS9LZXl3b3JkcyAo/v8pID4+CmVuZG9iagp4cmVmCjQwIDEKMDAwMDA5MTE3OCAwMDAwMCBuIAozOSAxCjAwMDAwOTEyNzIgMDAwMDAgbiAKNDEgMgowMDAwMDkwOTU5IDAwMDAwIG4gCjAwMDAwOTEwMzMgMDAwMDAgbiAKdHJhaWxlcgo8PCAvU2l6ZSA0MyAvUm9vdCA0MCAwIFIgL0luZm8gMzkgMCBSIC9QcmV2IDkwMDUyID4+CnN0YXJ0eHJlZgo5MTQ5OQolJUVPRgo=" # noqa
# cat test_files/correct_letter.pdf | openssl base64 -A
correct_letter = "JVBERi0xLjQKJSDi48/TCjQKMApvYmoKPDwKL1R5cGUKL0NhdGFsb2cKL05hbWVzCjw8Ci9KYXZhU2NyaXB0CjMKMApSCj4+Ci9QYWdlTGFiZWxzCjw8Ci9OdW1zClsKMAo8PAovUwovRAovU3QKMQo+PgpdCj4+Ci9PdXRsaW5lcwoyCjAKUgovUGFnZXMKMQowClIKPj4KZW5kb2JqCjUKMApvYmoKPDwKL0NyZWF0b3IKKP7/AEcAbwBvAGcAbABlKQo+PgplbmRvYmoKNgowCm9iago8PAovVHlwZQovUGFnZQovUGFyZW50CjEKMApSCi9NZWRpYUJveApbCjAKMAo1OTUKODQxCl0KL0NvbnRlbnRzCjcKMApSCi9SZXNvdXJjZXMKOAowClIKL0Fubm90cwoxMAowClIKL0dyb3VwCjw8Ci9TCi9UcmFuc3BhcmVuY3kKL0NTCi9EZXZpY2VSR0IKPj4KPj4KZW5kb2JqCjcKMApvYmoKPDwKL0ZpbHRlcgovRmxhdGVEZWNvZGUKL0xlbmd0aAo5CjAKUgo+PgpzdHJlYW0KeJylU9tOwkAQncS3eeYXTMDEZe+XRw0gASNimmBUHowGiClo0f+Ps62lUB5MSje7c+n2nDOz2wwF4xjHZeF4He3bGjPkjHMRrDdc5zvqMW2KZo2KS815/nlaBVo666OTYi2I6wpnuCGSDLtX6dfqlePyO9dSzu2yEfoKFxc4pZHtgYsIXpRJsMpoqRnXDq3k3rPgicV76Slpqlxa5YIQwha56uO95I400ipftrTy8n6WQbRBsuCsQWHJsUr+dbyUeJ2QTbbYHXxufiQKxVT+YLI4PLBAUwrmMSkOI3nHNmh4hCGNKZzBSxsmcAtz6EGfUjMYdzD5wH7SgExwx4z2Xh1RNseMBShXK+CcChjAHemewgjGpP2eSlFkJzQHMDyhBmmZC0EckZ5WglG1ErrwQOJv4tocmVC1097oI/zT1NpwgPZMei08QauDju42RS1qc6+Dc0xGDZVbJpyRdLFrXDvl//wuxetf/3Xa2QplbmRzdHJlYW0KZW5kb2JqCjkKMApvYmoKMzY0CmVuZG9iagoxMAowCm9iagpbCl0KZW5kb2JqCjExCjAKb2JqCjw8Ci9DQQoxLjAKL2NhCjEuMAo+PgplbmRvYmoKMTIKMApvYmoKPDwKL0NBCjAKL2NhCjAKPj4KZW5kb2JqCjgKMApvYmoKPDwKL0ZvbnQKPDwKL0ZvbnQyCjEzCjAKUgo+PgovUGF0dGVybgo8PAo+PgovWE9iamVjdAo8PAo+PgovRXh0R1N0YXRlCjw8Ci9BbHBoYTAKMTEKMApSCi9BbHBoYTEKMTIKMApSCj4+Ci9Qcm9jU2V0ClsKL1BERgovVGV4dAovSW1hZ2VCCi9JbWFnZUMKL0ltYWdlSQpdCj4+CmVuZG9iagoxMwowCm9iago8PAovVHlwZQovRm9udAovU3VidHlwZQovVHlwZTAKL0Jhc2VGb250Ci9NVUZVWlkrQXJpYWxNVAovRW5jb2RpbmcKL0lkZW50aXR5LUgKL0Rlc2NlbmRhbnRGb250cwpbCjE0CjAKUgpdCi9Ub1VuaWNvZGUKMTUKMApSCj4+CmVuZG9iagoxNQowCm9iago8PAovRmlsdGVyCi9GbGF0ZURlY29kZQovTGVuZ3RoCjE4CjAKUgo+PgpzdHJlYW0KeJxlkstugzAQRff+Ci/TRYR5OHSBLFWpKrHoQ6X9AGMPqaViLOMs+PsaT5MmqSUQd2bu4UrjbN8+ttYEmr35SXUQ6GCs9jBPR6+A9nAwluQF1UaFX5XeapSOZNHcLXOAsbXDRJqGZu+xOQe/0M2Dnnq4I9mr1+CNPdDN576Lujs69w0j2EAZEYJqGCLoWboXOQLNkm3b6tg3YdlGz9/Ex+KAFknnGEZNGmYnFXhpD0AaFo+gzVM8goDVN32Grn5AGQdOn/zUUV/SJ04ZOYwVTKwqr5Iq86QKnlRVoLpHxVENqJRAfOLl1/9dSYgvkcuZuAyT34YpdzhWXkKrf9AKaRUG2mHYSp4CpWKNRUBggcU+FXmNRbTX1VWm4jYTR26NQK5RycuE6wLWe3Lerjp6HxebLlPa6LpLY+F839zkVld6fgANz8SvCmVuZHN0cmVhbQplbmRvYmoKMTcKMApvYmoKPDwKL0ZpbHRlcgovRmxhdGVEZWNvZGUKL0xlbmd0aAoxOQowClIKPj4Kc3RyZWFtCnic7f0JeFRF1geMn7pr71uS7k7SSXfodAfSYCAJSyBCsyQsYd9MkEiQRRaR1X0hDLKIuIyOjCvgjqBDEyIG9H2J+7jCzIiOyyijuA/K6yAzgsn9zql7bwhxm5n3/T//7/medPO7p/aqW3Xq1KlT1QQYAFihHkTwzLpkReTe0FtfYsjdAMrkuUsuWPTq5TV3ovsY4qILLrx87pz3K0oAnHUAJa3z5sycfTj9hb8BDEY/9JmHAb4Sfxf0347+/HmLVlw2o/f3U9H/FEDv2RcunjVTePjKsQCXRtA/b9HMy5b4Gp31AI+ux/SRJcvmLIm9OWY/+rcB2P9b3geZiCz5YciU4hAE0D5FfEa0db72GcUTFb7A3E0GALbBY2w+PAb74Rl2DHPthL3QCL+HAAzD97oKfgPrQIFpGHIdTMSvjOG/YZlaIxTBvdgP98JrmPYcuAb2gZ8Ftc9hJawR/4S51oATusBgGA+L4QY2WrsYpsMH0mroC6PhIljC6rVq7UbtFu0BeBD2ir/XWsAOWTALv69pX8l/1t6DHpjjNrgDPmC3WB+HJNZSjynvgWVwp1grMe0C7SS2IA8uxTZIMAZeY81CAkufA5+yILtKHIql3K+ltOcwVQhqYR7cCftYbzZcyJOna2O018CPdVyGpd4BDbAHv03wX/AOc8jHtAe0Y5AJ3WEkvk8jvM6axdaWVa2DsMdk7KVuUIYxi+G/4UU4yKLsaWGx7JCL5aR8hfYGpEMvmIKtfRhzfsL+IVyD35XiC1KlNgRc2C+/pt6G5+GvLIsVsXFsqtBNWCxsFpeBBWvshd/ZMB/7+3Ys/X2WYHsEh3BAvF/aIZ1ScloPay4ckTjcBffA08yJbxphy9mv2JvsI2GoMEO4S/hQ/I30iPRHdSa+9XmwCG6AHfAP5mP92AR2LpvHrmLr2K/ZHew1dpB9JgwWJgsLha/FeeJS8b+kIfidJC2XVstr5euVz1qrW59r/UPrP7RibS1MQH5Yha2/DTbjm+2FA/A2fj+AD5nM7MyF3wjLY1PYlfi9ht3A7mPb2COsEWs5yD5kn7Nv2LfslAD4VYRsIU/ogt+osEy4VPiNcLdwAL8Hhb8J34kBsYuYEHuL5WKNuBhbtU68Gb+Pi3+VsqQDkob9XCxvkrfI2+Qd8jPyMcWh/soClle/v7+lsOX9Vmhd37qptaG1UfsrZOAYZmEvhKEcWz8TvwtwvDchx+2EPzEH9l0WK2QD2WjsmRlsAVvKLsOevJbdyR7kbf8dewp76S32N
bbZKYR4m88SegtDhHH4PU+YIywVbhZuERqFN4WToiraRbeYIRaKw8VacY64Qrxc3CSmxFfFv4gfiifE7/GrSTYpLHWR4lJCGi7NkC6WNkufSp/K0+VX5I8Vm7JIWas0Kf+j9lEHquPVCWqtepO6R33DUofc+Sw8Dk9Auw87LK4SK8TH4UahRMoUXhdeR36eAbPFMQJyqrCNrReuZo1CvnyZMkAYwMbCMSmOff2CsEU4IQwQx7AqNgkWCL300pR0aTuSculZOCo9he/2OpZ8meJg1whfKw5oYCCUYZ3Piz2lhPgKvCN+wFTpXnhXsrEAOyo8LI5HLvgvaaBcDXni3fA7cSm7Gh4XKgBspywbkY/Hsu0oFyazYvZPUQNRGItc1Ff8CFbDQuHPcBTn8Xr4LZstXQA3Qgm7Cj6Fh3BWdJMvUgqVDPaSMF/aIKSxRhCkR/Dtylg+E+V0uJbVincqXwtvw8VwQLLB++Kj2PoDwu/EMdIxeSKbhzPgalgLS7VVcLlcLf2RXQAimwox6TBKt6vEYikP6UqUKtNRpu3B2b0P5cBgcQyGBJFzRiNfTEEJcSd+b0c5ISEHzcc5fg5KsdehUZksNMEFsouh1AGQXmmdCNO0h+AO7QK4SLsFeqA8WKddhSVug4/hJtjG1rReCUsgF2fO+2y0XCkckCu1HsIG4W1hkrDpzPHF3o6xIHyB39+hZ6D8JGyQ3oJJMEjbqB1C7u6KEvYOOB9GwRF8y6+whhFiMy42Y4VdWqW4BN/3A5igPayFmQ3maRfCOHgKHlRlmKkmcIxT7I/4vlfCHGGitkKc0zof++Em7IUk9tbFKH+uSw6dMnlwctDAs8sH9C/r17d3aUlxr55FZ/Xonijs1rUgHsuPdsmLhHNzQtlZmcGAPyM9zef1uF1Oh91mtaiKLIkCg+4V0cq6SCpel5Li0REjepA/OhMDZrYLqEtFMKjyzDSpSB1PFjkzZRJTzu2QMqmnTLalZJ5IOZT36B6piEZSrw2LRprYtAnV6L5hWLQmkjrK3WO4+2budqI7Lw8zRCqC84ZFUqwuUpGqvGTehoq6YVjcLrttaHToHFuP7rDLZkenHV2pQHTJLhYYyLhDCFT03yWAxYmNSmVFh1WkMqPDqAUpMVYxc3Zq/ITqimHZeXk1Pbqn2NBZ0fNTEB2Scid4EhjKq0kpQ1MqryYyn94Gro/s6t68YWOTB86vSzhmR2fPnF6dEmfWUB3eBNY7LBW44kjwtBcL9w2tXtc+NlvcUBGcHyHvhg3rIqmtE6rbx+bRs6YGy8C8QqyybkMlVr0RO7FqUgRrE9bUVKfYGqwyQm9Cb6W/35xoBYXULYikrNEh0XkbFtTh0GRtSMHEy/MasrKSe7XDkFUR2TC5OpqXGpQdrZk5LLQrHTZMvHx3ZjKSeWZMj+67PF69Y3e53IbD4WzvmNMWx108ObmqJrb1LKMWRUciQ6QisyLYkuoovlM/eszpBxtm9cNk+KlhmCs1G0dkfso6tG6Dpz+FU/6UHPNEIxu+BeSA6NG/nRky0whRYp5vgZzEJ22shvGmO5VIpAoLiUXUoTim2MaB3N+7R/dLmoRodIknggS7D8Zj386s6V+E3Z+XRwN8fVMSzkdPqn5Cte6PwPnZDZAsStSkhDqKaTZjMqZQTL0Z05a9Loqc3AikrmakLPG2f26PP61iXv8U8/9M9Bw9vmpStGrCtOpIxYY6o2+rJp/h0+P7tcUZrlTa0GoxWzBcQrbIY5Epp7clJk+1IyXF8J/CmXp2k2pBruQhLFKZ8tSN0J81try8fzFTk3aMcnFyOpvRzFT/xJn+AWf4z2ieY4OIDcalsmrytA0bbGfEIavpFY40CHI8TK7OiwxNwRScmTH816Q19yPUZKeS2GVDKQHynx5keM9ImG24a/BD3NmjeyUKug0bKqORyg11G2Y2afXnRyOe6Ia9wjPCMxuWVNSZjNOk7bs+O1W5sQb7ah7rj5NCgCG7omz9hF1Jtn7StOq9HtwrrJ9c3SAwYWjdkJpd+RhXvRe3FEkeKlAoBZInQh6oYviSDYKFp8/emwSo57ESD+D+WU0MeJjFDGMwq0nQwzxmmIBhkh6W5GH0IRkzdHJ1e+7hU7KmB3KjwLiCLQNq7CpAnjfPG8MHw0X3+4jY/H1ShlMQkZpxYUS9E4RTcjPY4J3kMJvD4RgyBfjT5rLb0c2fNovTiW7+FJNOb+lCaaVwk3CHRXpUYlZQZEG0yswhsJdtgCOQtOVFS3sCi2D5TdrhRo9HmIKOL5JetxtdIYcDny6nk4ceS2a63coU8Dgc9HQ68ZnlkJNOd6lMZbmoLJlFcFcgyJn2faycrYFgYqznSO3SRMJzIqF/0FM+pqUcBg0KlDFvWa+erBZqE0yPzIt6FUXt3adP3xLhVOPgP03+7YdFK6QrB14V/t3wl2dgL21CneNz3Nt5IQcK2Zq9IGnHk93sdmWKJFVGp0bnRpdbr7Uq87MulpdYl9tXy6vtSoHfKgYLCnP9OdYm7bNG6jPuoA4jRzKbustqTfPlFhZ26wahnFzknnBurhcswSatledAxzc8BzpOJJ0OzBGMKw7qMaVJ+yQZox5TfNRXikL9plioVejE3lLSqbeUyTGztJhZWoxKS6PSYnFHiEpz2KgMRxO+VwGV4Mjqju2hTLl8iHNtlDo3wgctYozYiUY+YOQwRutkIx883aHo42fjY1abGDA92DYeteUt+Cwfy/1jjuLjuDFQ5IZB5S3lBF9ZUbmnpbysCAO9ZV5fGfMFzLEr8eYV+1EbUlR6uoQoyyvu26dP79J4PIrcXNx3oKC7Nwnxba8sn3vBmpvOqX96Y+ut7OxV/UZVVf5qc+u7bNF58aHT+k++bWPrY/K+mr1zznuopOCp+gt21fUSJ3r9c8eMXNzt1FbV0W9h5cTLe9F8uA85gXb5dmhMZihyrsWiqiBKNHQ2a64dLCoxZZHHV6pOFkdFbBGnYMtySlbelVbelVbeldZ/oyut1p/oU8eAc4nVzb6rRRY3urV2zPEjP+jHXj2x0zLyDNwn5X+/WUx8f0i8Vt73WOugR1udj2GLiNcL8Q1l+FXSwQRJzJXBEpGY1CQ8nMxTBZG/ishfReSvIv7Lr3LCfIN/mm+g/AhXeD6p1VtO7aUWb3pG+KO87+TfH0NhhLtikFqwfU7cNxxL5s7xLkwXqjxV6ed6zk2X7I5ct8sFgSANCFh8Juf7TM5Hxz/30MTzxS00CbzktnDmt3iI8S00BXpQMy1ZkSyG/7KCTv7KTv7KTv7Kzl985aSdv7ODyvzRscts/+anJ8TSWh40Rp8H5uDR2BHXE9MHcoWMdCEvz4tuYvcC5PHNQrdbxlx4S81XrS+1rmdXPrW5dnSva1uvk/e5fHP2LHqytaXlUZFtXDl9dYaT+rBau1H+Sn6D9jbs/uRtM+Jb4kJmsG+GYA/hvjmaHUoPp0eVQrlHIBEfIJcH+sdHy6MDI+O18pRodXyxfKV4hbxR3CjfBneKD8AO8RAc8n8MHwc+DmaF5AQUygNkqVa+JbgpfiguxfyF8VJ/
WXxkcGSoIlwRrYpPtVR7p2RMC03LmRo+J3JOl/ny3IyF8SvjN4ZujL8bfC+eaQ+yjCbtjYbsMuyxN5I9s8ukYHqwUO4vS4Lo7yqqXeNBvwxKnpiWJQvkATk/N9ctCpb8XNWaZQ57ljnsWW3iMyueFqTBSmvSvuKDlaZLZ+441kjDhY7jjTRI5EieTQOXNkrIihTWFwqFeZwX8jgv5HFeyItH7AxHG532IOW3K5TFntlt1nRzdtby8RzrqT0x5nh7OXd00KCj3kAZCjYUbijbwFviecnzUi2ONmaqhWVLa1nt0mUxf0CNFyjRLvko03z5JcWSD1kAQ/t4S4V4tAt4PVBS3DdeIH27blnZ5nvuf/7F1qd2pljFS6+wyh0XtXyybdGOyz//9dutH7Ls9+ZNP3fOPbWJdWVXntvMpr/zNpu97+nWB995vPWDG4pq72ZlDcx2a+tbrZi49fWCAZkk9aZrn0pfyn+CnsLze6FA+2cjzZp4k+GI0XwOkCvooc7J5M8s/nSaS57DdNhNR8h0ZNNqeDaXJ0F6CvzJ+HOWOEtaLq6QpFhBb7EsNFQcqY7OqQgPy68smCTWqNNzzul6XZorSuNMczHfdMRMR9x0FJiOKJ+memLdETMdcdNRQPxSSa6uzni+kC8WxPq4S6PDYhVF0yJTo1NiF9oXOBe65qbPCV5uv8J5hftqz8X5y2NrxQ3265wb3Dd41uSvjt3i3OTelJG7SyFVLNkjL+7LjmdZ491YHKBblk8q7hWHOTgbnT0uz74uW8iO+Z09cgtiLCb7ZeI8vojLuT2subl+kXjlaAK5pBZhkFrkmkBZ0VH9m53sEct3Oe1yHqoS2RZVkURBYbH8LhiGK1V2j6wksflNKNGO+qEHo4XKRyEeFmHjcYe1hN3MFNbEUklXD6qSqsYWj7LSfDD0l29M/YVLcHTFoRvrhspbo8slTOlGzQ5Tmd2yivMc7WYK1w2xB1gcJfDfeGKfKS5JJifdlMs3maRqZq9Z+rJWO+YIzRSPPndOmNPo+FHeGZ6W2sQRehynXsBZhD3CFYSaXj2BlD/zw9p7EoBI65srlBT30aVnfkE83ru0T58SVCdwmuF0UjLSA34pwLULmnTx6U84Z/z+6sXbJ42fPqD1wgnzL7jmm9/c/91aeZ/7sUdS95b1Y29X11+x9tQ9L7b+/Q72lueiG84ZsnxYxQXRwMxE3/vnLH569vxXV7muv3HVueNKShZ2HfD4JRcfWL7ic5LEo7TPpJA0ELpCX2FbsrvVaS3MdGYVdnMWFpY5+2T0ze5fOLKw1llbuMA5v7Cu5wbn2m53+u/KesSZ0VWfPgrx6mfJTHI9lLm9657MJ7s+l3mg6x8z/tLVMszPcmlIvMRJPh/nJ85VvVHvTo4jVzgQDia6F5aWSWXdR0ojuk+11CTmWuYnLnGsc7zk+M75XcLbt9TFJE9RfmmgOC89OKPb4m5Ct1CRa5DrJtcWl+aSt7h2ur52iS6+6tFouxwkYV3EFTThXNSEPI9HwQgSqy6FNHpXnFjCFSQmcblCYqBJ2J50Brtzbfe29FBIhbamQ0WBrTgk2rvN9Mzkqyzfe5D84RuSJu37pItvQRS+/Mby8onJDIHwt6SdQvMl4jD0H8Hc3HGcdx863kvaqXX5vF3o/54v1vlNwrlJV0ES4p54JN4zvjMul+Gc4byLguJN03Gc6xPxXhSZdObidqSsuUzYWsbKAvQCC6nogIUaGogFuxTxmVDE50YRnxtF+fuVA4oQVgYpgpJOIaS445PnUVxcxXdwFT/IVXwHtV/hmoXi4io+3xwpvfq1qRN8x6PPFdwEIf9TRO1RU1FMcG0j8fHHNJGOJAYdbUkcwdlT1C7vUl3MlHERQxOKz6SlSGBpjGYFnzN9+bd3aQHNGrVgoMAnkT8jI90fiMZFRXWhruLHqYaJxPLZexfsfGr48hG9F75zASupWL/y8pxU8KKD163fPt5jDXR5KhQ4/7nF04sXzZ93Xzxn9ZTKHWvGrhqb7nJm5cdsF/U4u2ZpcOn1VcmZo8667NipNWf3Y3/pGvJ0HVM0ou7ccWdfSrNpLc6mMOqGHshhLyWvZLLDnS/3litkeVA4FRbC4S6hktCQ0JLwzWGlf1q5vzxrtH90Vq2l1lntrvWfl7XAcqFznvsi/0VZzeG3He8E3sn8MO1vgb9lfpRzOKyFMyNykbsovac8yJ2UR7vHy3Pld3K+lU56HJ4Ml6QIkB1SVGbLCLnsQXNtDJqyk3ZyyRhn7vyDduaxJ+119nq7FE7SyNr5jLEHuVvXk7lDV0jsVAjxpp22y1y5IM6aRGNvX8G8AtXk5eziLQEfnxwSnw+c58mNc6hE5Ewn6tv0mCA0MxT4W1mKHWNSmA1i45jISHOiecNoTuUQhzPOesxDJTEfsR7jrMfoFYm3eVI/NY8FqW2MbztZZu7wvu03J5ytlpWP8bQYmz2U7Z6WM7lVV4gG8a0e8RzpPrAUt+clXhTPqPd6INqlQETpXMJVX2Q61uPhxmW7zt+5NNn6zX89tVAonfLrSx598OJLHpX3tXx707ibXl7e+nXrm/ewTfunXP/aKwdfeI30mfHaZ+JRlLpZ7O97IYA6eRdu0uA9aOVPN3969D7lz10CX8ZLXSvdzG1nSRgPS0AEyReyq8GQZGeuDNVCHabyDlMd1GGqhzpM5RPrtTde0Net52qLCb16ZieHWx0sHBqaNjQwKW1SoC6tLnCXcJd4p/MBzwNZDosz07ZAmC8ukC92LHHWOx9yPG7dY3vc4fA71jo+EkRXlxnuxe6VbtHNSHbGewI1qg6bdTNshcNwDKzgdtvhdBtD2HSTId3EkJxX3PkuC5fLXbK5Rea4KVW/Sq7ivJRvT4QZA8ZY0pVAzSxJHMSSlIr14bpaknNEkrPDCM4EWZwJRoYyOLtlcNbL4PIuI/+AysLqIFVQXZRNtVE2la9O1HdDeN/hs1d26XNtWySdQU4LuNplhnFrLzAy6mHssuOkVS/jnYzCzFtW5Kk9gv+4OoCsVGOYe1iA5Bd4S3205rct+cRTYvmunK9/907rP5Z9ft1j74V3Zq6ctn77A9cuuJGtCTxxgOUw26NMWLXz3uyFFz77pzef+RXKnErkpQ902xDbn7zKJkjOmLPUOcwp907vHTpHmGybmD4pdIEwW55jnZVeF2oOvyEfSvtL5sdpH6d/Hfgy82MuW/zhcCKLBFJVFkkn9Swh33mWv7/Q21klVDgr00eGzrFNdV7g/Fj51H+SHXd5WAZuwT1ulDl21QsodMQzhM4/n+BCp4TG85sn+CjGvG4zwZlMUMCZIObxHPQyjzfprfPWe1EuEefq0snrI5Hg5csxySmvQnzu5dLKSyXYaRy9LhpHL+2raCi9emW6I1nHJ9IKH+cGHx9fH+cGX77KNwqqh2L2qwfUD1RNlYg/xqmimsvnD1/31Fx9XnGe4YqEmsV5JjO3dHw7SUPaHtcT24QLDyznaiRKnPIjuqApJ5wWNUtrUdL0poUNVzadM1DssPTTokbsN+e5lYcuXvDG6rpNRbtbIo9
efMmD26687N61mzeeun8LEzdMGCy4TlYKvldffvqFd159jlakKlyRclHSZCB3bE4GwhDKEKaItXKtdYp9jrhQXmydY7dkkFJidNWR5ERy5YToWeB7Wz6ZfiJL6uXrn9krNNg3JmtwaIJveubE0EzfoqyZocuUyzJOCCeCHvAztzMQGO+v8y/xi/6Q+2bPVo/g8UjZIZsK+4TtNEtMyd6c5EPlwQl9WxqKhQAtNFxBMRX8gGkmCSRxB/ceV9Od+o5ZIccXfIidVJS1oLA05WTOrDD6dsfipUSfIDUozMJ+WhumU0H+El2YevioezgfePLVZH5hqTnW+qzXJUCk3biH+LjrsiLER9zPRx/H/cwVpjbB7V9HMAx54MTSdoaUo7hPSBzhcqG2vGVpOe23y3ymLZE0m2WmWKCtNHjT1Tw/DT3Li3PlRjxvX/ev9n7e+jVLf+8Qc7HvP7M1rJm1seUdYYKj39TrrnqETQ3c38jCuIY6WNfW91u/80R27pvHbls7dN5DxAlDWieIXyAn5EIhez5ZZ7fL6d3tsfTR9op0xZqTmdPdHk/vHi2z90kfZa9Mn6pW2+fZT9q+zXCdFe1eMDA6sGB0wc3dt3ZX++T16Taoe6W9Mq+i2+S8yd3mq7PyZnWr617f/Z2Cz/K+in5d4A34lYwmYVdj11CaypcGTwR68oWhHprhIKBmLVyd9MihkNtW0SXksPkzSmIlNnPwuYMviDRzC2g8bLFg8GCAeQLJQF2gPiB1T9pxZLpz6RDg0iHQJh0CXDoE/DyOzPpcOlAqhfy6dAiQlk2jGNBtY5zlTibncZ5b4WYx6BLmnBLmnBLmvBHO3+8+4P7ArbmlsHuQexyufDzczWWHm/OMO4t4xt2FaneHqGY3lxVuLivcmYnuK/JIXCTGnmabpca20tNeYnCRwdnpRDnyzxFioSNEuSluKS4pgYA/oOu2ZJoRdKkR6F3i5bvGeFo70TF3p7146Iqr1wdd7JLUu8cu+sMNT13x0Jx3t/73F3c8dPVV2x674rJt1VkTYsWzp/VNXc/K/3I7Yxtvr/9+wT8PXLZDLPxD8/5Xn33hWdJc1gGIZH9OZ7ftBT9OuIxAqUibOb4RiEm9xQpxn1PiQRmBzNKAxevwposyA3dIVtPtNoe5BjjM4eZG/0LqR0fMmizpU6pZWbOV+fkC4E/S4Fq78mc6DayVdlRe6l4r1y6tWZTOSgYTPtDWdBpobiGwU5usNlINKX4PjYB1rJ+kRrfSPqUp/zG/sMS/1Z/ya37JL6TzoU7nQ5rOBz89xviBkQdbdYxOUiPIvYdB4gZVw/Z6MhmgZhmqroUaA5JpddX1UhD4JlLgavDYjOHjg+0ViaUJw+yMruNnMoB5GqHrpGWMBMbQy5MuxaXGXIojmzkt7mwGdI60ChJ0nlSiq6p+f4Y36uVDr2R41zVe03zJ76oaL144/oZy1Eu/uaX2gbtbZgj3rrty0o1XtzyJ0mE9Di5G0YkceziZKdh08zrfDfKnyp+ka5ubX90hmw6Ja+zcfmbnej5/Kvyp8idmbjFVO90hmw7M3JLMIZfANw4ifyr8qfIn0yesUTM5ZNPBa+7PrUF9qPvHWW+2brWmrM3WD6zHrCpYw9Yl1nrrFiPosFWz2sJW1CdVSRCtikgj3IPXeg0DRVYkm6LGZJC2SFullNQsHZaUZumYJIAUkQ6iT5L0rYdANRvDL/Hhl2xUv5TOT+Z08wN3tHJRw9tpI1aQxlo6MsGycn46iEOd4GsDgSb6sva2ozM/ab1LMkQc7/WNjY3SlwcOnMqQ4qfeoVmKoyn+k06JhBf26CclbaYzi+mQSHHqxYeWc7zCn7Lh5gucMlWZZhXdzr/LJxTRap76HeebQ5vpsJoOkexA/IB0inipTfApkbS8Uqzu2G5fQamVlnikPpkH5PGA5LUYokiSLCl9rcMlOab0sFXbLhUvtr0jfqSoDyksqsTVmKVM6Wcd5BznrJFqlGq1xnq1dLl8h/UF5Y/Sm8oR5XP1H8p3lgyfzSaLoiQoimq1WtBjtVhiqpKuqoooSTHZli7LNhsOt2RhOJSyolpwsoJNamLupFWWuHGqi4V8FRG+O+BWDTXrZlQu7LwT7VxA2HkX2WMg8ECBBwo8UIjhlhbYIBiHUwqHPdmLiwYPtwrxHSxwDqE9MooCvuUAvveBTIfzr3nD57ZfGHABGKMrjboBcukJMkDiJgMlgge/yCzl3kDZOvmshHS15zmkwYQLHarHUm4pF/nTMP46q6wsbL1WFKxBp7cURcXSGmSuodOrkzZr95wyqyUnpxyH9v2GnDKFDj4inOzKK+MNqUHVFBVUSCQwx15QtOaGvDKaNg1+Iu83eMoUnXCfg5Nddj1zgm97qCrfXyRmSfdjbenp5fyBuU40BCnz33Zl68lZbY1u6zltNsV6SayxEsaiTEV2Z9s/b13A9r/feu9Ked/3T7FU6yUts4XwFa3nIu+vxgnQlyQZG5N0tpdjZ8iupLejpDpDOpHZ70xZdIb8oZUF5wqXNnQLYHfffvw2wO7S3jrt2UunXWL6LYEYLpNuOSxvkT+QpXH4OCaLYXmJXC9rsoTrik0Q9aWGSuJLTkZJ79ItwJpxFy20X3f+eXrdyWm37uhsxQUPWEjqgCl10KGZh3+G+IGx0pnih+QPGeVIAnHtYtkPpQ0NwepGed/JSkMPUOKoTUaFD/dCmiFSPO1OOXWH13TkmN0XMh3ZpiPLdOTolk8jDTmyTUeW6XCYR2tO0+EyHW7TkWbqjx7T4TMdXtORZqohHtPhMx1e0+E0z+EspgOF1p+TY+zO0ph0RDpi/Wvg44h8SD4REQKWSNQazI5YRTGaG1IySPFTmRLNyvTYDsbYzbGtMSEWCGS5Yjd7mVfi21l+DOjl9mG+nU2ngfSSEA3QYHoFvql18E0ttwx7zWPcdltbVpvMDfKNTZBLoyBn1mDs5myWzSvIbqsgm1eQTWYVL1WQzbWUbG79yCZhxdWlbAdVlW0aobOphq4glER58VEu7KJc2EVj7CAwMvUIYSCRJ3KRl/MDkcftxeA3dKLvGw32PJ5M58qRzpIuXQrmx5rYZbvzhp+pIeuWO64Mt7Pn1ba/t0H+lrEVc4Z9snQZysfyclxLUXp66HiTH24aqpMjPS2e7vBmM58zw1SdzD3YTy20yPoZ/HwmQA9ds+I2m/Y61r3FDy245Lfha17evH13dPrAJb9prJ49elV/KX7b2BnnV+/buaelQLjnwhn9b3ug5bdCw2WXjb/z1y1vGxr1JziT/GxXMk0WlTRhm6fJ85H4adox8USaItE62QVZ7nIPu91zMHg4qAWliCXdle73oUbNFL/T5nQ5XCbTuswZ5zLOwtCVH+RadJBr1HauS9u5Lm1v06XtXIzYu/AUlJPr0nauS6P/O5057DbDCnwiydcuO1fX7Qz/2ccGSWx1J706eCwoLAluDaaCzUEpKAolGX7ON37OQ37OPX4u7040er3GFY0fVadtHdRpbzt1WjKkW3PS11E9HxvwnGh/6KYr2Me5in1GREI/CMfFlL
TsQUdP69h+xWu1WWyqTVQ8ca/iymZum89gmMJVtAFDpuSMYRw5tOOKdfdd/Je6e8d7bI2FC0csf1iK/3ZnxZIxxVe3LBfWXrRo8C2vtjxFO/Jh2mdSAY68EzLZ/j0ZQePI/zM+td0kCOaQK5NH+FRbpmO4MsIyVamxXKDMt1hKPf19/f29gxWeKl+VvyI4XZ5uneip9dX6JwYXyYussz2LfIv8s4OXsgyrIjvPFSfLk23nOi4U58hzbBc6bIGQpHpRUKWbHJNuWuXSafOUxjdD+dl8j53NWUdtuySnctubYV02jwm4gyuO5KCRUnXDDnc0J135sdKeKgPVo0ZUUSUu42e4aq8PUFpRikVktkG3i7OLy6GfzXFuzgeHi+648LtmwI3bEOLcwQ0zhvzg0hL8nD+SWB0JJgG4SQeMy4NcA+iVRaYbvgDWnsEJnqWJWlSzas/kD27SxYWRbHSkyVgnyZOs58vnWyVSVyhVmqcvMgLoB7bQfus97IHrnn+X+a/88voPWo/ubVi3tmH3mnUNQhoruPGS1r+2vPblr1guc776yqt/eP6Vl5GJ87VvhEL5Dgiw8F5wGEua3VzbLKZDNR2K6bCR+hCNk2bdnJyEjvpM3OM4nDYmgt9jTbhtij8k2t2eLtCFOQ1LqG4DtfFFMuZgmmqpsFbUqUvUevVmVQIcqK1qSm1WD6qKSrYTmqWqfhzEHd/wU0hVlz6GgxvN9Cmts0DSzplBMThBZ3B1n7AAgqzPrrkdNJHjRzxHjR3xkePl3FLaUu5FAe4tKfG8ROqJkTQW0K2l3mjvEm9fnIpRbzrZzARP1ujy8y/sfu21ux9/PC3RNffeLZ6Bc+4TZm1k6oWtN2xsuXVM9yzSFLXPxMP0Kzq2ei9kkSkRtTQhkuYvdfN7n7700kQay7ek+R0szW9XwObF/oMSvylr/ebM8bfJWn8sGCChmMUlboDL2oCPG6TaToIDXLYF2qRsIN0wTRkWiwBfggP6lSHsMi3AmgMsMDaLBraABGzWsSxhSdbWrFSWliXRQQ3ZU/hQOhyGIaXNbIEbXWvEehC3vZLVVB+5dcTLrSm6DcXGLSdUIzdYWLmEtXKDhXVs5hnKomGV+KEo5WN2hB+hlfNzW12QZkkel9PtFHC/pVhkC4pTyZENTos3G0iYFhauglpaeQ3bdwGOZ4k3PcCNn33ILQ666tB594/z2Bvt3osmTLhxQOPdjSMWjeu9XLilZfcNvYZPmHTTeqEMN764eQeQn8AR9ckz9gLoBwOmBOFmphyHvlcjxY9rSrJuaOAnmGI7Ff8L/WaeR+FTTDFun5w0NU5+vQ5VTsXQLk+ak9OIUBVDWf2GlDmsUrc6K9yeIhkb8u9Pr9O8nW6HoTsfN3fqRoTqMMyix017x3F9B+LtYkR8Zm7pP9M3Nt6IHm3IhfdNI8v7u08fJe4FH5lM+bTV7y3wJyZ/o9HhFPTS7OTyRhx6RHOjS7d6NCeLyOVNcr/NKzJwKKrCFLcNbE4Hvwrs8DJBsklem6HX6hLCW5RIvPaa583XPG8kXsPtBn6ME36di4gVspEL01mh1M0mjPKe673RK9L7KNSCw6Yl5bB5YHEsaQ3nlXpCObqRIflEOL9UUhzWNCXbmumTJZAUu9Xusvg8kCamqyFLtj0HV5OYWmhJuEqht9rfMsA1TByuJNUxlir7UPdw7yjfue6JvoXqbMsFvsuVK9QVlr3KPvce37fKKWtXu7crdHUWuLq6C3xF6f2gr+9Sy1rL7eJvHQ+zbcI2+0OOx2GPss/1e+lN5W3rZ9Jn7k99x5WT1pBdoRY7+NOj6BdR+ErPnz5jp55tc7klH3gtqiWmumMuXwzApYpO5ojhfuTNZF+aqU4hxgr5kaeTpacpNrs3bkt4J0sTbdO9F3qv8m7w2rw2SQRGw6EPzOmuruX2paLE8SL95pLnCH316xb4LzuZLsoyTldVttpsFmRnm8dLR3dVu2XwRZq0kcm5Nrcr8qxXtURUr8+XkNV0WVZdOM4xpyvd6XRZUEdI2CzpmB1kAdsK6YzRvX/VJ1ncXofLyZvnQ+lG95cFgSk+N92btaWf8DhZnZNOm0VnE3s4aYuMs7HFtpU2wdYkTElax3nZYu9Kr+Aln90jszq+ixZlTPw4O5F2Yi4/kckcc7y2NthSuxT/ZWW2oPuTNqOJx/j69I1BGRnb+HPdmLMS665+bt1ZwR8S5Mp1Ls9zqstTTiA3oSoVnlTd6Iw4IsJT2mFgCJd2sBF6uiM4jw+zfsanpipVOglnnEU7uEvtyXhA3qSqVAk/QLZoh3epET3Uh6G5PBQL2uOOUNkoCQ42qD2pxAboJ+zTa2orvC1fgOfzaod32yJSBCjCMPNQaW/s8ZVBdx+/0rorjawtNebeKqFfS6PLnjXQJtB/cj9E0pqVpAX69E3DJz5YVCwQWVXrk/seGSSVPLJ3S++z9+xsbXzykW5vSfGWu454XxYuarn9ldeEuafeEa56/PsDKKu74r7nDZTVLvb0HhY0xOdnuuHF1yS8ZBF8rNgXIAvi60krOtjAXG5PfCY5Ch3dhK7WIk8ZK7ONZJVCpWWkdZxnOpssTLZMs473XMhmCbMsC6xXshWWK63XszWW66zfseNCdqYlzrpZEtYyy4OWt5jqobNCT0apgP1ipau+UV8ZE/pbbYLFZosxAflWYMjQijBTTqiKYpvpBDILJK18GiZcNqGJuRuRi2XlSeFcAFAxkm911S7OrS4GrqSrzlXvOuaSuaDPpyjXCrBdw9hOYONgMWi4ceb3diHT7VmRd9Vz+tZXt/55WshxJOE5TjzbgqsrqkUfo0b0MT85JtsfcqfH9VyCX9nFIeTjh0P+eDcWtwjM7D0L9SX6nnmCepG6kidkS2tYLWcQi/Z+g5s6wSCfPZFdZrX4s88mqdoQKNPvXPrLhHRElr/M5ARimJLeTInm9c7LYGqfkryMrsIDy6tbx4mzW55efPkC9uUtokW55dKW86603sV/7ko//8m8s3ztWbtnuMu/tWRb+K9g7/uooJDoK+MH7Dm5s+UCD1gc/H9iYDwHz6cObB0LQz1wcufJKzxghLd9nDWKEUS/YTaQEt6C86TlkIEYqebApfJUqGbrYJqwHa4iiDmQlB6FZZh2O/oHI91HeTH9FMQHiHLEVESWETYGMRMxifyYdi/lxTKWUDmcLodpljAslqdqLVjfJvlFmIvYjO77pI9gm1IGi9D/AObbLwH0pTSYZ5OyHW7H8LsxfhaGbUZajf570T0d8/U03Fb1BsgkilAwvBuWc73xvgXi09BHWq79Fd+lBsschViLdYxHWomowjRpSIcg1rEXYT17UbsP45HCaqx/HYUjhhl0BJazBuMHYb589K9Gdxa2Q0HqRuQhugqPAjIGPIW0CN//HP29ES/CPHrntnfC9htt+iH0Nla1B9b5X4ioUKZ9jNTarm0dsboDRoolUI90ISIbMUF4DRZJo4Fhf90hfwwiATmP+ul9xNnSbBiLfobtnCQ3wp3kR4zhWK61SHfDVvE49MO4K5RN+B6zsb97IU5Ak
fA36KHEYCXy1zAsfxViM5b5GeeH2TAZ6z8LaYn0MeehtYiNWNfXZj9R36B/FY7rRKzre5oRmH8SYjiOSz3iQmoP1l9EfU7jzqa2lmHaI5hmOgHDAxz47sSTlIfyY1kxgw/vO03hPkxzA/brYaQSIoPaYILzmQGMewHLyUQoiBzEWYiPEfchFiL6I6oQXbFuwHpFzq/IM8SbnD+QN+QXsQ+xbZxn9XfYzMdTnzP3GmVRPXnKo7DQQB6VSfOFeBbbssssm+YU8YxJOX8v5Hz/Fb0n8VQbxbknfQnDqQ18DiJvmZTmHbaZ5sMmVILXI70T+Xg18Sy1z6TUL8RrvE9wThi0vN279uRzBKkIEDV4fbVJzb5oo/PgASyzTjkfZcpWGCGtgBHir+F86RgME7vBWXJPDMP3wbQp4UuYaGmGEhzLcei/owO9naAeYgvkZnzPHdifh+Ae7NOl0iGhi3SIyfIO7XMZ2EvyDuEa7v4B7QjWrMcRJbSP+3fD/xMIb8o7UGbu0L6QD2kavs8tNCfUL1lPRMSkGN6AqEegOs9utyxkTeoU8OBG8DhisZSE/nIS+krNOD4ZKOdxLmD4FPmvsF+8Aa6TDmlvs3qoFw7BWjUDZgqbUKZhXcKbsJpA5SNd0o6PzuC5jrxkUpNfO1KS+QZPhZEqOP9eN3DEwAnEt8hHVciTmbQ2kHzm6wPKaMRanV+1k238+RI8iPR6kz878OnCDvzp6MiXHSlfW1C+m/MU23Gd+f4kH0nGkYwkOUdyxkzfkbbLv0HYjnxMcvg1mGbM6y4GRmEbPzTmPsphHO9zNE2p1B5WGrVtok/bphSj+88IWXsY3/uytjW1Wms11tNu5lqqh4PdXEflElhkyLMHuLz5Bn7D19GpvH1WZSeslE/huKMM5O3dasxB7E9s90KpDvv8TtiI75EprsP5iOGI6dQnfCwAgrQu0Joo3ob9TGvRDbBafBf1BcpbAl6+XgyCc7DtL/EwXFOJUph8DtynfAnF0hSUtc0wm8aK3oPaQ2NvuRiclgyUE4egl/QIpskAG6bbyvsgCQ9zvqC8C1Glwr5QZ4GKPDsW01B59/I8SfAZ/fEA7wueH3UR4mHqCyxTyYCJXJ/4ErbIU+AcnEP3qvVwLyqlgPNiG5bxIOabQm3BfFl8vb4NzsX5tR5l03qUOcD5f5p2StyB73MZynWEWI99tAOCcj324UL+7sMkXcauo/kjboc48YhyG8ph0idugw1SAiqUhXADht0go5zEeq/HsGtx/vbEuXsd5g8bchuw7uswnPIOIl2GdASaL2oS0pR6rgcAbwPpKVi/+DncK46C9cjHgy23YT+sgR7I0qQ05iJ66eD+awxs1MHDPDpleaIHrqZwoQT+iDXYATRaQ/dKq2C+NBWKxV44d73QQ/oDztXv4C7RDTOkl+EuqQk2kl9Kg65iCt+/EXVLCj8A4ylc+CP6b4dpUjnmXw8XSTNgubgLee8NsElzcawxn3wj8kk+5v8GyzXAPoJp4lScW2vR/Z32KKXjdTRq5xCkEdCD52sH3lYTHdosVOFbjcIxxfaS+4z2Ylvb2mm28Ufax9+TysV8lEa6C8qxn95DxHTaOkG4AXYgtgrvwFBxDFzOtmn7sF8rO2BEe7/Um12FOEvqDU8gVqG7O9L/RuzU/ai79YZ3EWuw7KeR7la4cZCBMAT6EMWwzYjbEa+Yce1B9fxYeHvI2dq+M/yP41qDYMe1fYSO6bGf+2B9faSztX0E5MVRBGUlpKuXQLpYgOG5mK+DX87G+fQ45Iug/eOX2vRzwE/Pdv2YbP+O5ngg9f8LeK8djRA11ob/uG3/KXB8VyJqef9+BRk6D4GLvam9h3QqexM84sXIgwj090B/mtmf5jhh+K08vMP4Ia8A9XnH8I7+juP6S35hN8xoD5MP2vjhFhhIkAZhekRHv+UlGEhQnse453/olx7+BUyDQvFOahPyYMEP/co4KCAI+djWLMqDcw7R5j+AMgJBaXl+Jwwn0NwlCI24X0O0xfeGCkK7fu1D/Sreqceb42OOS8fxwfYlpddhJNI40jKkk5COMmn7Odtx3nYMM2XJj6XpMDd6/lSZ/18Czp2XES8iXvj/dV0MkFcRHoTyHuohg1CPPIT6ybl0Z6sFZcn3RYiHUA5NRvoWhuHq3doN4US3F8MuQHoPwKlv0b0Mww/p0AQpG7YaemUmhu0x8lqM8ibp+U/9HuDkccROPf+p7YgF6P4fBK7np/6C9Gmkt2P6LzDftUif0eNbZqD/EsRT6P8S/RciqtF9M9IMpN0RaQgf5t9EIH3kB/vQ/3P64/uPf5WizjIL2xkmmxfSqzruIf5lao7nL9COew1z/H+JtrMZdKB6P+Ce6UPU+1Lt9z4/t8cxKY5na3tIU7QW1CkdpEeTLkv6M9cfDcr3b1yPxXoB0k1KujPpr6Q7k/6K9F5uM5B5e6bQPp+3y1g32stWdhw2IzyIbIMuxDTfCQXa6yh73Mjf3+Le6AEC+l2IqTq0A7h2uXGt249y91ukr6E/B+m35ppmytYfyNhfWNP+r/3/7hr5H6ypxQZmdMBPhZvoZ2AkoeNa/O/il9bu/3gt/4k1uv06/b/1m+u8CetAKCaoSW0foaNe+gM94Bf8v6Tn/rv+jnrHv+3voJeY/o74QXxH3jP1mSzIakOHeffvgvYW0uOndX+zDR3ncdt8M/zYRxXtgXKgq7GG3ofyAvV/LQeBa5R2C4ZdY/keii2PQTH6H0fgutl6FOlsikO6hd1A9m2tBf2/Qr9Heo2nrTYw+5f4uSPfkn7O9UPsMy4Hb6b2QxFiAMKH2IVYZI417SGx7rcFXHVpnytN076VXkd00AF/kfaGpYjH0O9GvxtlcbriRbmdhIfJHo/UhtSG8n3CaRuf1qJcwdOM4rblFTAC5fxF0iGyfWnPcZteK7hVBz9HWY1raNi006E/g2xDaoTsJVqTYZ+rU77BdfAcXA+ttHZgvVP5mdBCiey438BvRDsMM2zI6aYtmexTtF4pZ4GH2zHa25E/gl7SdBiGGCTp51RTyP4ifszPataR3V0cC08Z51sp23bYbH0RNltmQ6VlJT9v2iTeDasx7G71RrhbSfDzlSnmukpr4o/Y/siWmdVm0zTeuaNOwNs3HUaTPaZ9vWY+SyWupd9wO5Rux/wF3QbX+A2I2fp5hXbix+2d2quG3XOescZf0rbmd7TTT4cJ4jW47zNtsg8hfRPOk9YijD7u2BazLuyXlp/ShUzdBN3ncFufft5DNqi0dudwlbyfP+fjNZLGTHbiHHbT+Gt7Jf18boh0GaYXIFP6GqHbHvn5HNmGEecIb2P6zThHL8K5gjwo3crP8K41gGm1h3i+C/VzM2USYhC2ay7m205nRyZgzWloR6QpsIGD29W0+4R0bS/SZcIr/IzRbZwFZkobYTK3aZ4+EwxKXbnduqs0GYHjj7gc/fn83Q3K+yqJ+dy4r6N3JNvcWQAYZxEHGDZSI636BFSqSeRXO1TKuyFfXIz6SzPKuhCO3SgcVzesFj+EXKkfzBK9MJvAKrXX2ZdI
UVMnCF9g+NtIf41+Ovt9C84zz9V0+zSc4ngZdQWEcZZLmEMQtrM845ywxnDn6G4MK4M9HGYZ2+GhdsB02oeIU8JvsO4hMFtowjq2YluwHtGD868DMM/5Broa9QyXzsE5diaGdgTmJVrUERhONNYRRnhWR2A40SEdgeFDfqQdP5Xup9rxU+HxjsDw+P9BO36q3GhHYHj0Z9pX1REYXvVvtOOn+jm/IzA8/2faMbYjMHxsx3agfMJ9bOsLuDd9FOmfjfX+c6SjkSL3tT6HbtxfaHMN/5+NdL9F4P5XuwOBe2VtiAGUeRrtgdch/RsC99XahNNofQlpSL+HYdaj3YooREzV66K8rU/qdXMYdbbu1vO3PIb09x38fsQnen28bpK9+5BGEXca77feqDelt7311tPpW0P6O/J8qdPQRMREzB9GOuk0Wh/XoT2L9HcIsou+aLSL3LlGf9A7P0FlnZYLcFK6E2VGHQCu1enqdp1KV8JoLnMPnLFWLeHy8CPYxuWdhrKvHIoVJ+oh98AQ0htIhstzePrr5dm4NgHqJ1P5ed5C6TDI0vOQKX8MM6SLYJi4B/Xi4ShvsQ5+LoNlk9wmnUO8DsYg+FklPxOis5PLYJ2tkesvHkyTLn2K7b0D9uOebb1cDQzzK+pZ6L8Z1/V74TL5SrjCsgj2K8ewrYdgLq5XYWUGlMm/ghHm3lZZBFbZgXqBQS23wyy1O4Zvh4j0CYSs61CvOwjjsc/6mnW3nd2rkI7hD+n2Fc5/iO8TiNG8zdhe1MMk3Funm/cG5Frsk9m8PWP5mdMjIOEeHeSvce0eCV1VK+peRbDeGoStygl8DwX11AQ/l59r9H1POn9SL4Be8jqIm3t35Qj282SwmZTO40x7AOpu90rzuL7o4+dahj2gjZpl0HlbPWykuxId9RpTj2rTKQwbQZvNwXwfpLR+tr2/QdvpG7pNoRn10wxI0Dket4l0pEab+DleM/KSoc+q+2GUKiJ9COYqa2GSPAb7JQ0mqc+CTx0OQdLPVJXrdYtojZa/Q110EsRxbIYa8/1SBM2l4cYcX4HhbyEe1ecjzS8K53MTw1ruNMIXIK5CzNfjKU5bqbtbvtbL53FX6elbcB5qdAYntLPVfKCD70Mi7fVU4y7V2h/Q02f3xD+Vv0j/RRsazWG6U/UjZ/wd6a1I55l+1PM+wDl6C+aNIBRTj+5IJf1+yjU65boh0QcNej/xGul6HWnH+ys/dZ/lZ/RYfZ6Z9Mx7LyY9z6Dxtns5v0Db35M5TTXN8Lv+VdudYXPLMumP3D/QbXKnqfKD/VN7yscEREOPJf19FD/np7s5P4O2O1y/Qh44E1MJdJ/gx6DgSkJQLzwThp7/k1BuwnwIS7gjtL8TsM2rdGh3GfjSwH0EkeFeGiH9uiO0v3P8+P26Yco9WC/C0kOH+pIOrv//DLAPQMUZbPFxqtBa+LNALYOgfm3gehOaRjD73exHs1/w3T7B957X1mazfqPc/+04/m/H5f/qvX+u7e1h3NEzKd3dU3603Tg+HH/Xwe/SbIc0Awr265OIHYiXDdxKwLmSRXeVxDnIT3P4fcW2PD/ggxtwb0ow/Mb9G0VBzU4N6vOA7v7ogJof6x91js5/aoHeT/zejq57fYzv4TTu2M41ZF++dTzca9yTDZNswXWX5nlP6WmYe6bOp03S99PafbhOypjeK6+ASuEV7X75CpQJx7TfyytRF0BgXdcaeMnAVl3303Ya9yAVfh94OzzSHri3zSVQGqxvOeJBQ98mPXaZjtZP9fDT7TJlr/hPfI9TkMnvlyb5/nq8NB/39PMhU/wS41FfoPMmcSYMpjVD7IO6Fd25ucy4L0u2h/eR6nBiv4wXt7Wb33S/hu7VIPidHBqnF3ANoPQv8Pzm/r4rty8tRDn+LoT53R+M43d6sAy660R6kYg7Cnkc8sUETDtB+4N4O9IRBv6JuAjbOxXmC9dCD3Eu7ocPor6TgeFLEYvRHUTqRtQg7kZcAr14+Cnkk5OYHiFK6H8VqYx7exnDvjOwUQfF8/32HpiNOvFsLE9Pd4jn0aHAbPYMr2u2OATLw3QC7pRE1CjEDMOtYPwazLdf37+TXYHS8zgzjfV0GvkoVNrmQqWShrhO2ycP1vaxz6FcmgZeHFMnojeO9evG/oH0qAMI7C1tM/pfFjreCzDPyQ0qPwbz5bOhh9yC+sF7yAeHoVw+AXfJg6CrMh7XsUeBeGkAgvZ2c+k+Mb9LfEh73bR9m1CqIcP6PAzHMQS6v2FSYQf9QBnfdwpfj/S/CUba2w5dI+P3p/W5xvVcdRisxnlciRhh3Pueq5+PoQ6Kc0/S76l2lR6EHF2Poz1UK/aWRvNhEsqGNtsrUbrTRrxl6IKYVXtU+CPta7W+dFYhjKf7Wjzvufq+VCN79W8QZLO8u9350ybC/7/Pt4QO51A/dV70S3czfumuxg/8/+aZSse7G790l+MX/R3OXH7pvAx5lXTkSlxX9ivbtUPofwLxa5SvDxAk0DRuH9X1tetEO87tFbgHHQn5hk2U7KS5KL9ypY3cpr9WLw/SUDYN0W3z2vfG7xy4PZVsc6SXikH+O4gs43cNVP4ow37LfzfRZqcthSkka0mm8jWD7nbjPg3lzWySLcJLUCJ8r8sgdogDSBZxu+QQbOMQTrlbKDRkyhCwCiX4LrfqEN3aS1wmuXSZJQKW10TyDNdfXV7liFm6/BLe0GWQ8D6mMXEc8QWd1dB+mu+p6T7EI3xtOqnLSS4LyQ6Jbv57FH3/5KY5SL+D+SV9ydAtd3SgT5r0l/RCI88OI88P0xtnN7iWpPE1+UXoRnd72/ZdACX8bvQnfL8yAuNJBzmt55v2dj5OOEb62T7ruC+g8xwaW3NPr9vNWt9oR2fo4Os09eOnqJfZcN0dzetAGcfPe5Zrx4120v4kE/n0+ra9n7mXM/caAAOkzfCAeAHqQj3pThJf759qt799gMDvkLwED/K7zEgx7DVMN0JfN/ga8jziIOIPiK8Qb+p2qpa36bdD1C9t+6EtdH+gda/8HvbXC2C1jIZMZZ+ur4j1sIzs4gT6XQGB/3bKxHacVyTHl5P9hn8K/2/Bev7vIfY/DSn0M3jqTCi06vXUYd/wn8GFuhwu75A2GiBjAUBghY7MfQDZODtz6nREpiNrPgMQcwCKalRJQz9E4VqA7tU6iqXTKGntRCc60YlOdKITnehEJzrRiU50ohOd6EQnOtGJTnSiE53oRCc60YlOdKITnehEJzrRiU50ohOd6EQnOtGJTnSiE53oRCc68f8SMPqLVvANlMM9oIIAHiii//VVetT+3yCDsBcmi113x4Phg0+J3eAwQhC7NSRywnvFAjGnYUA42SRGd/syit2De4gRLK2IPyP4XIzYidiPkGCGmIvhHnyuRNQjdiL2Iw4iFAB8UmwEsRixBXGYYsQcMdQQCXsGF4iZmDcT2+gWA/A1QkOIEMZnEWIcYgbiJsQWhMLTUchixErEfsQxHpMUAw23lGDbAw3Xc7J7wYXF3DtT906v5d7d59TodMwEnQ4bqSfrryfrVaoHnzV
EpwXddeqLFdcTtTmLmwf7RT++pB8bvgSfTHgO3IxBGLaKGZBCCKJihCRF3+78ePGW/aIETBREBrMhrDWLrMHpLR5sEzTha/BBWPhKOKrHCEd3u7zFWwaPEj6EnYj9CFH4EL9/Ff4KK4XD1Of4HITYgtiPOID4GqEIh/H7AX7fF94Ht/AXKEIMQsxAbEHsR3yNUIW/4NMjvEfcwp/kHoQQhPfw6RHexdd6F59u4R10vSO8g037U0PfsuK93JEoMhzhmOEIZBsOn7+4Sfhjw3fdkKPiONLIUU+KXWAglIhdGmK9wk1isKF8frhJ+Gh3JBHeOrin8AakEAK25A2s+Q2IIMYj6hBLEAq63kTXm1CPuBmxFZFCIJfh04OICC8jXkW8CT0RScR4hEU42IDVNAkHGuJDwoP9wuvCixDAHn9N+D2nrwovcPqK8DynLyHNRfqy8EJDbhgG2zEeMI8HqQdpEcbLwtO7831hbbBX2I99F8ZnEWIQYhxiBuImhCLsF7o0zA77sJAn4WULYMoG+JzTh+A+CyQXhJPxociAEXrE+5+NLnxsiWyJC8n4pjvQS4/4jbegix7xazeiix7xK1ahix7xCy9BFz3isxegix7xaTPQRY/4uMnowkeTsPmJ/IJw33ELWWSwW7gUe+lS7KVLsZcuBUm4lL7wnURtu6uhsBB77M5kolthuH4fq3+K1U9k9fex+jms/hpWv4rVl7P681h9gtWHWH0uq0+y+idZP+yKepZsPMNblgyy+pdZ/WOsfjmrj7P6GKvPZ/UR1jfZJOQ1jCzhpIKT3YNp0iE9eyBKH7eQhz2ahzyfhzJhPz4PIDTuS2KiSBc9cWYu0S67Cwfp/rP6Fy8ePEJ4FjM+i8PwLHyAkHCAnkU2ehYLeRYLcONzEGIGohnxNUJDKJi6Czb8Jv5047MIMQgxA7ES8TVC4c35GiHAYqOJO3nDioxGjyOf8Cx+u+A3T8hL5nhCnoRnhHhTiLlz2bhcLVfoC34/APi8Fm8Tc+75h/Of/3CCdbBVuFG4CXJwIG426E0N3+WEm9jtDfEnw4Mz2G8hV0KuY2UQZzGk/WA59/eGkIVoKYSEHUiLG0JTMZu7Id49vI+5KNee8HehI+HPQ00COj8LPRl+K9IksYbwIQzZsSf8Rui68EtFTRYMeSrexJDsi/Cke0P9wo+9zJOuwog7G8LXENkTvjo0PLwwxCPm6BHnLUdf0h2eGJ8WHoHlDQudH04uxzL3hAeFzguX66l6U5494Z7YhITuLMTGdgvxSqO5vMApfZvYvGR3dZNarY5T+6jFanc1Tw2rOWq2mm7xWTwWl8VhsVksFsUiWQQLWNKbtMPJBP2Fx3SF/6FH+kk3A4m7PQI9+f9PQX9V1iLAKEiliVVC1aQhrCrVPAuqzo+kTkyKNjHbhGkpOTqEpXxVUDV5SKpfoqpJ1Sam+iaqUur4c6t3MXZjDYamhPVNDCZXNzGNgtZkp3xDq/cCY941N2QT7brmhpoaCPovGRQc5BvoLasc9iOPOuPZ7g+lBs9w56Q2VU2qTm3PqUkVk0PLqalK3TopMr16L/uGHasYtpf9D5Ga6r3iQPZNxUQKFwcOq6mpamJTeTqIsP/BdMgx/8PTWXBhpnQQseTq6e7U08UwP6bLJ4LprFaI8XQxq5Wnkxil27U8v2LYrvx8niYQgeU8zfJApH2al2OYJhbjafz18DJP87K/ntKkBvIkoRAmyQ3xJCwLQjxJiGXxJFNPJykyklzXluQ6XpPITqcJ6Wmch800zsOY5mf+Gu2ZnzlDEgm2e0DNrOkVc6IVddGKOYi61PWXzAum6s+PRHbNqqGISEqM150/ax7RmXNSNdE5w1KzosMiuwZM/5Ho6RQ9IDpsF0yvmFy9a3pyzrCGAckBFdGZw2p2Dx9f2veMuq5rq6t0/I8UNp4KK6W6hvf9kei+FD2c6upLdfWluoYnh/O6gPP4+OpdFhhSM3S6TncLdhvya112Xs0Qv2fJQM68A/KC12TvQ21lG9gTNSlHdEjKiaCoHoN7DKYonFMU5cJgtxEVvGZAXvY+ts2I8mCwNzoEEisuXn4xBCvmD9P/LccPBq24mDpcfyaW/9QH4ypSyZnDlq8AqEoVTqpKDZowrXqXqmJoHb1Sqr8ZZrdXNGnNeuBZGNifAkWxLSGFlVOY1Wok/OH4X2xQ/mdt64Und7NkLlsBy2vEVG7VZAFFweRp+K7Tp1XvQ12KloflNfiCy1mCLTfLMJqt/11kIvTOJlZcbLiMvlhhUD0nZlludknbhzor0dZjK7BA+H8A3a7ylwplbmRzdHJlYW0KZW5kb2JqCjE0CjAKb2JqCjw8Ci9UeXBlCi9Gb250Ci9TdWJ0eXBlCi9DSURGb250VHlwZTIKL0Jhc2VGb250Ci9NVUZVWlkrQXJpYWxNVAovQ0lEU3lzdGVtSW5mbwo8PAovUmVnaXN0cnkKKEFkb2JlKQovT3JkZXJpbmcKKFVDUykKL1N1cHBsZW1lbnQKMAo+PgovRm9udERlc2NyaXB0b3IKMTYKMApSCi9DSURUb0dJRE1hcAovSWRlbnRpdHkKL0RXCjU1NgovVwpbCjAKWwo3NTAKMAowCjI3NwpdCjQKMTkKMAoyMApbCjU1NgpdCjIxCjM2CjAKMzcKWwo2NjYKMAowCjY2NgpdCjQxCjQ2CjAKNDcKWwo1NTYKXQo0OAo1MAowCjUxClsKNjY2Cjc3NwowCjY2NgpdCjU1CjY3CjAKNjgKNjkKNTU2CjcwClsKNTAwCjU1Ngo1NTYKMAo1NTYKNTU2CjIyMgowCjUwMAoyMjIKODMzCjU1Ngo1NTYKXQo4Mwo4NgowCjg3ClsKMjc3CjU1NgowCjcyMgowCjAKNTAwCl0KXQo+PgplbmRvYmoKMTYKMApvYmoKPDwKL1R5cGUKL0ZvbnREZXNjcmlwdG9yCi9Gb250TmFtZQovTVVGVVpZK0FyaWFsTVQKL0ZsYWdzCjQKL0ZvbnRCQm94ClsKLTY2NAotMzI0CjIwMDAKMTAwNQpdCi9Bc2NlbnQKNzI4Ci9EZXNjZW50Ci0yMTAKL0l0YWxpY0FuZ2xlCjAKL0NhcEhlaWdodAo3MTYKL1N0ZW1WCjgwCi9Gb250RmlsZTIKMTcKMApSCj4+CmVuZG9iagoxOAowCm9iagozMjcKZW5kb2JqCjE5CjAKb2JqCjE4NjgxCmVuZG9iagoxCjAKb2JqCjw8Ci9UeXBlCi9QYWdlcwovS2lkcwpbCjYKMApSCl0KL0NvdW50CjEKPj4KZW5kb2JqCnhyZWYKMCAyMAowMDAwMDAwMDAyIDY1NTM1IGYgCjAwMDAwMjEwNzEgMDAwMDAgbiAKMDAwMDAwMDAwMyAwMDAwMCBmIAowMDAwMDAwMDAwIDAwMDAwIGYgCjAwMDAwMDAwMTYgMDAwMDAgbiAKMDAwMDAwMDE2MCAwMDAwMCBuIAowMDAwMDAwMjA3IDAwMDAwIG4gCjAwMDAwMDAzNzMgMDAwMDAgbiAKMDAwMDAwMDkyMiAwMDAwMC
BuIAowMDAwMDAwODExIDAwMDAwIG4gCjAwMDAwMDA4MzAgMDAwMDAgbiAKMDAwMDAwMDg1MCAwMDAwMCBuIAowMDAwMDAwODg4IDAwMDAwIG4gCjAwMDAwMDEwOTQgMDAwMDAgbiAKMDAwMDAyMDM5OCAwMDAwMCBuIAowMDAwMDAxMjM4IDAwMDAwIG4gCjAwMDAwMjA4MzIgMDAwMDAgbiAKMDAwMDAwMTY0MSAwMDAwMCBuIAowMDAwMDIxMDI5IDAwMDAwIG4gCjAwMDAwMjEwNDkgMDAwMDAgbiAKdHJhaWxlcgo8PAovU2l6ZQoyMAovUm9vdAo0CjAKUgovSW5mbwo1CjAKUgo+PgpzdGFydHhyZWYKMjExMzAKJSVFT0YK%" # noqa
not_pdf = "YmxlYWNoPT0yLjEuMgptaXN0dW5lPT0wLjguMwpyZXF1ZXN0cz09Mi4xOC40CnB5dGhvbi1qc29uLWxvZ2dlcj09MC4xLjgKRmxhc2s+PTAuMTIuMgpvcmRlcmVkc2V0PT0yLjAKSmluamEyPT0yLjEwCnN0YXRzZD09My4yLjIKRmxhc2stUmVkaXM9PTAuMy4wCmJvdG8zPT0xLjUuMTIKcHl5YW1sPT0zLjEyCnBob25lbnVtYmVycz09OC44LjExCnB5dHo9PTIwMTguMwpzbWFydHlwYW50cz09Mi4wLjEKbW9ub3RvbmljPT0xLjQ=" # noqa
# Note: due to the way PDF files work, simply embedding the eicar test virus into the PDF results in a
# successful scan, because the scanner sees it as plain text. Most PDF viruses drop the intended payload onto the
# machine in a temp directory. This PDF file contains a dropper which places eicar.com into the machine's temp
# directory, so ClamAV will detect the dropper, NOT the eicar file.
#
# File was generated from https://blog.didierstevens.com/2015/08/28/test-file-pdf-with-embedded-doc-dropping-eicar/
#
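# Illustrative only (an assumption, not this project's actual antivirus integration): one way to stream the
# pdf_with_virus fixture below to a local ClamAV daemon is via the third-party `clamd` package, e.g.
#
#   import base64, io
#   import clamd  # assumed dependency
#   scanner = clamd.ClamdUnixSocket()
#   result = scanner.instream(io.BytesIO(base64.b64decode(pdf_with_virus)))
#   # expected shape when the dropper is detected: {'stream': ('FOUND', '<signature name>')}
#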
# cat test_files/virus-letter.pdf | openssl base64 -A
pdf_with_virus = "JVBERi0xLjENCiXQ0NDQDQoNCjEgMCBvYmoNCjw8DQogL1R5cGUgL0NhdGFsb2cNCiAvT3V0bGluZXMgMiAwIFINCiAvUGFnZXMgMyAwIFINCiAvTmFtZXMgPDwgL0VtYmVkZGVkRmlsZXMgPDwgL05hbWVzIFsoZWljYXItZHJvcHBlci5kb2MpIDcgMCBSXSA+PiA+Pg0KIC9PcGVuQWN0aW9uIDkgMCBSDQo+Pg0KZW5kb2JqDQoNCjIgMCBvYmoNCjw8DQogL1R5cGUgL091dGxpbmVzDQogL0NvdW50IDANCj4+DQplbmRvYmoNCg0KMyAwIG9iag0KPDwNCiAvVHlwZSAvUGFnZXMNCiAvS2lkcyBbNCAwIFJdDQogL0NvdW50IDENCj4+DQplbmRvYmoNCg0KNCAwIG9iag0KPDwNCiAvVHlwZSAvUGFnZQ0KIC9QYXJlbnQgMyAwIFINCiAvTWVkaWFCb3ggWzAgMCA2MTIgNzkyXQ0KIC9Db250ZW50cyA1IDAgUg0KIC9SZXNvdXJjZXMgPDwNCiAgICAgICAgICAgICAvUHJvY1NldCBbL1BERiAvVGV4dF0NCiAgICAgICAgICAgICAvRm9udCA8PCAvRjEgNiAwIFIgPj4NCiAgICAgICAgICAgID4+DQo+Pg0KZW5kb2JqDQoNCjUgMCBvYmoNCjw8IC9MZW5ndGggMTE2ID4+DQpzdHJlYW0NCkJUIC9GMSAxMiBUZiA3MCA3MDAgVGQgMTUgVEwgKFBERiBmaWxlIGNvbnRhaW5pbmcgRE9DIGZpbGUgd2l0aCBWQkEgRUlDQVIgZHJvcHBlci4gQ3JlYXRlZCBieSBEaWRpZXIgU3RldmVucy4pIFRqIEVUDQplbmRzdHJlYW0NCmVuZG9iag0KDQo2IDAgb2JqDQo8PA0KIC9UeXBlIC9Gb250DQogL1N1YnR5cGUgL1R5cGUxDQogL05hbWUgL0YxDQogL0Jhc2VGb250IC9IZWx2ZXRpY2ENCiAvRW5jb2RpbmcgL01hY1JvbWFuRW5jb2RpbmcNCj4+DQplbmRvYmoNCg0KNyAwIG9iag0KPDwNCiAvVHlwZSAvRmlsZXNwZWMNCiAvRiAoZWljYXItZHJvcHBlci5kb2MpDQogL0VGIDw8IC9GIDggMCBSID4+DQo+Pg0KZW5kb2JqDQoNCjggMCBvYmoNCjw8DQogL0xlbmd0aCA4OTUyDQogL0ZpbHRlciAvRmxhdGVEZWNvZGUNCiAvVHlwZSAvRW1iZWRkZWRGaWxlDQo+Pg0Kc3RyZWFtDQp4nO08B0BTV9f3hQCRIQGRIkMfERWUkYSEoSIkYSoICuJCJUCEaEgwCY664qqiHVit1WrdVmurUldbR0VtHa1WamuLWhWttrbVFrV1fA7+c9+ARwRF2+/r9/8/B07uufvec889d75becK1etUHXheRFfRCNuhRbStkx3EjADuzFiFCAWDwAB/V1tZip06AtS3wvwqurduPJlS0ESBU47avrmUBwIUAbI1yR+WO2uu619VaQhBqxfdAIa0RujCRRuLxIA2gttblqTQLH1K/r9uiOvOaG00/yXRvkBstlagN49CI6QSGE2Oy7v3aIiSGqngy9qeZt5owY6AwC8A8yRSqOaY/mNM8ENoK3SqsHULdwX4b3Ns+xp/6erP5WUNT5WLN2+4Nw1vz09MqXWw/C6YjFAaKiOyZ+NYmTt+hkXSs7TFW+dtb2ZsLt5qZ3vOm3xSw6bH1YeWtZu/bO1pfrKzrDKz7Q4Yv1uXo5k7LrR+WO0TLHzf9onZPtv9dwKbLAtte9oxcWssPt7255tPgafJoLZdNQXPr31R+LJ+f1k9YYNvR2nya/7/LZNvraeV/1vZpCppKD+tmEhToVhghuoPU2yI3+MUjSgv894PzP12AFvhngZnnPfiny9EC/1Gw8yicrM6DKTvT/g//6QK1wH8a8AyVCEKSskKiQ/n0ELL8kMKvvNRWBNixrNTWH7BTOUIvlH/K8wR0KvuU908XuQX+bhjFd0FuIAqOhAuyRzbUH0J8rBbQYsDlT8Fwu6fjWN7Tsam4vZ7g9+/ADZw6eTSjfh81kwdcLISel0T8Z+v134rlNrQpBfQEXlaAvRKwGrCGFkUkBCQBxYCxgOmAOYDFgBbAMn7T8XC6ASBDVcDvq8Tzp/P/PV4k8G4Q4F1AAfDTEzALcAogCcmk9bZB6YD9AEckEfyipDYCPaAJ0JxErwZ7Q7I3anlgOglZ/WOH+iIDMqIipEY6RO/EueW7EwRyX3sEJELV24NKTw1mUV16rfiPa7I4pCBv1q4C04lZ1AihB8chDRoJaZdA6mYoaTrQRsAC6rcYFYJbApRAD74Y+iMtulW7DUw2FbGdE8qEsLmQggZCNyyvD3rhNrBR9q0dIhxsdDRhCy5q7MxDjgjvk48m/6jdj/fL69K0p9IhUQrkZ6Ly5tG7kCi9jw3fHfiCWeaHfj7vsg8v93149AblUJVBb9bozSMyJxRrTMNCxhfp3p//Rd+DYmH8naQHM64E9162NVbQ6efph189vHpSxZEl/h5Hq7auH3D/TsrJTOUG0kMXdDrszvIePxj3drDfcXDxsvW9t9xUdvP/1Ttx2LoLEWmnN2WVTvPy7Kle57L4j12f7AmImG1JLn2r/7qJPxYXpGyNfrXM2KFs7cl/RfK+kI/tbHloaX28pN/3nr+Wzo/84ajquys2O8t9B0X/duXeYv9LZdt3xYT3fb+3SjBjW8Xr31zbcC/hqIcy5POPu/wZsrZb6arjgw9k3mtXfnW4Rv6VfMN58qbjy24Hy51KR5woINsF/7ivx/LSa2e/Hazbc+7Vra9OCh9+rO/+WvecP2Outvn62NShU3l4L8TGil/rvl3/UwXWySCnmPcjjBqdKTQE/8587cSog2Kn2b/P2jvjq8n9Th707DQ2dF2ZKnTNZHQxYGqhyPus969n0w7bOewWzFz7+513ox9tnnfv4sKfSI+3NtodvKSK+aLw4tjofVOnH521dn0nwdDxxS+tmXh2x4g96V+t6Xlc4r0uaaf4DTeXOafT9vTfrMz/6qjwZO/4s7Jc/6nr548Z8V2HxcvbVYenfP2nR//vInI2v7bhaO22g+Jjy9P2T90SdCWer49YmKK9nLtqQ+gPGXvLNmg2ftO+csdH9759SDRW0dETFnnMAGouJYQImQs1RZpQzm+qWq8u0BixhDgdS3U4SApjL05eNa7sdETe7oD43K1fbN59G6mWeSkOrzy54ux3V36YEfHlBfdTS/s4Dw5aZu88V3M8ZM6HNZOC1ntvOJPk81mQ7uLhtj/+PN7z6PZX2lxIPmLqN7njqbJZm3acI7/d0e2U3+9B54Z/7D8qel7iwKtf3wm8FP9rt84OvMjGCy8+pwrcAJp5rTc9yj5WeAku9rXBaYaz4e77XKrGkWZDXpexXvYlc6s+WBqc6v2pfnZ7bemiqvTVsSeTJ3uf+cFCHNylnt22qmi2eug0j6p192Wnu+t8K
7eUJ/Z//bNBw18Pr5q7qZe/270f7te27/Due9/9dtjJ180vMO3M8IfG4yr/GxNiF5RfP9f+duimCUldl/EX35i5fW/g/PM7XrlzYffc7wbE5U7PmSz6bszZeQt7f/fmorUuBwcN/zEwOUd/LP7ltnvmFgcK7wquHtJpP84aOmyuJLXnwmrPI+e9Xns3rTJ9wR/hxkvRdltKX3+xoiB3ZXKbgln77exLXinLOK3h+eZ+e9uSVvHaS5c/uSk82H9/3Nn3lqz4cPuYklZbCuYtHlK+c/tmw5TTu3SK4nLd+LnFmeJNd7w3dRtqO9kmZ1T77379c9feFxUnUPnvM97JWiQ9njB10/mj2kWxt8csH/vht+/LsyeVPvp82bFNfdd/rdNsHbTaLr+Dt+nujjOj8kyn93uXfRbxU80rX478cPcv1c5HLg38qeZht02nXpoXsXLOV5dzWm/yPfN7wTeORxZvu977rlfFd6PaTXppemHA6j7bPG/FBc/d4Xnhh9UuyDl/0Ff8d7W9D1XNDvjt0nj/gG+darrvX9eux+UvPvY77pg8ojJzx16XDKd2kjV3frl3Z8vymPjDLst9Y+7+fPfiwR/sY4Yov9xYtGzmlQGh+x5eO3G4w5qY5VNu3fzqhVPjBx+q/dfBLvsePJggF54s6PzlvNAVD25fOzpvS6+HD3cntq1JnZaouVt2KG710suC67c83cOyxnTq0nHMJ20PCr6+Ofe4/8LI/W/1sX24fK7PH8cDRy4Z5xPV/dMd1b7ElTUO4ycbT7iOF5aWrm67bNzqKuI345fdszOHqBNvdV0yITL5wKgfc/sfup43adBnu+e1Obxvb7JDSZhLn8Qb+jgfyR3Pvu6tk5XuJ00PLGUO13/r/0tVyZv/mt6nV9bVkHjflG6p0rUdu7w1ct5OrdOUJVk/Oz0aq9s20b4k8r2TL87z2ZV0xOFY15kd3H5cfKP78gCfgfN9Li1+w3mKzXsbeh/Zn3lXk53Wtcac5Jm415I0eMePi9DkPn0v7E//oOu5T75454N57nPbZ361Z1pUaMcP7yoiDowZMsu30r9rXkyZvXC15/UxA2wvXNG/dHVcjU1fj133W3d6dXfaFrsPtVfnyByF0WGubT/Z0/ejNvtmBHrn5fZ2di9Zn3VYdm/3geMBmfovLl5yiFj4+QsjYjaX3S8KPr20fOKAK8dulyr3CsZt7TnuY8HVsyPe/rDG/3THMuGtod/svvRgc+TE2nuX9gSsCP3E/8RGsXvhuXlVCWe3n7txYPPElHbeGwI7RqzV7ul0LVrRwe7P7qZL/p8FBr1S3a9AKi4uXlUx0tkte3LcANmcYofUiee+jlMumd+6U++PZx5c3W7z2k03Ku92m/iN7hfVnE0+bu8Na191sW30shVyB2nCxkqfQlNCt7cqBi/ZlH10znT9loPvb57VPezDXa7H3eNnD121NfyDqIUzjjqeyikv7yyTC4b4ld/suuyefHTktZwEXtG4X9d8H3zI8YWfRe0uWvJ7nXzxxP0N+mULD3++TDTExdvpX9Xj557q/f2Qt655F/eYt+Dza16OVVfOT2wTesW15y239B5LXg0K7NKlT2W/s6Plby5tf3PMmduHivu+dDjSdkufc4m5Hgc0C7bMdL6fseB0/r1jSzdlSyJa/VL8p/p6r/XlGfb9/U55LJrv7Bz2xizUR9alW62xzbEX+zlkZq7ZlvzBVrv9NVuSibHmYX0O5V16b+u2Ph3PjL0t77Zu0uo5nnH7Xb0+KK06XD3Fd+PKtNhO2eMWz105olrfd+jZlI8/i+qz6+Yri/efHnPspKDgStb6wuVHPfQO/f1mTOhWar9rwOf6hT32de18JTTpDaNn7YnMr+63+r7X+/wzNr2ynR1fNr+zPTok89NVDutc/7zP87LP6EEOqZ7ywjt+G6qnn3nwwuX02g+jCrSLt7+kWntmvZ+b++2ObVspb68MWnK1x6xB0Wd3ByUvcpW6DnAb+E7WNdcof92G0dUbz5+NdldsWDrq9TeIGT3aTf1t4iLFmvSIqCuTTF0Hf3DnjM/d9T06LD22cuHQ2amqBWf3CT8odBm5e+Mww6KfJhoGu0ZLRm/zmpCFtp3Mqvz85CN5fORPWaO7fbvs0BfHsyNrLu05JB4kiwv83s/vof3tmK6TRu4be9W55MidVTtFd8evKlVUxWaO2PqK/Unbk61fXTlm64zwxbLA0WfeLzmZ9UXEr46/mR0/6fLypk495u07ob22Osp0Ourj8fMPfRl0rerMq8Ey+fjb+XMPVEbGHPE8vPhE/pXQwZPMLj57bS84dU5JbpNS83bvnKur7Du7kPeCE7eZfwhFjY1tzl+VrdgJlDeMfF2sxjZ6NmI9PDOzk1SHA2L3mXemjxMYDF/vEs7vfG5O5aZvbGZeljuHx/j3u3bdeXOQIGS2+pMl2t1vvn20TZ70vKRwo5eg+89vjHlrf9FH116N7e/Rt/R0j/zynOmvGiKuty2Y3ycgtbN//4AQyWJj6y69FyyrnJt5z7I5cs1rWZdFa95RHNldmXSnZPLKG0uJSZ/5780VTjyF2i1Kf6t228NIr9KC0A4ewx71U5hOz7INWH3oWHXHpRtT+J8P3bXr0wt1VSd4wajp6WpDaGTyap2A9fytHvCZPGc2Zx3Rej5UD+14T54dWadkPTmph28bS0nSWBrWQlAPW1s1WyTS+9ja4Si28DcMEtqEj49RzxjwJ8dqjCatQR8tkoSIRaRGn2fI1+oLokUDMhOCI0WkyazW56t1Br0mWjRBYxLF9HJ26KnunqczpqqLSUhAb+qujhYVms3F3UNDTXmQvdoUYijW6MFvpMFYpDaD1VgQmm9Uj4OEi3ShUrE4PLRIrdWLyNwCSbRIZ5aISPN4oPJHS7CbFLtJsZsUuwGlzsuD1oYQDMG6SFmXujBhrEsY6yJjXWSsi5x1kbMu4axLuIgs1Gn1o6FG2BCRIw26JNqBpUShvTD38KZnG3ytBTEnqcz+N3XRhKEx011geRxJNZhdDd6X4PHpu04EB7E9FrW/IaTSolJEFosF3cFLTxfEq3kdYUpQI2Aj8XHCNjViKgN+TQDl3qqmbjXMAR5yoOLhBbotE14JphvjjtPC1chAjjXtgdxH4Ly+oK4d1OJFOuLbgKihGBuCytu1ho/qr+jglb89h2J2AijpvVJXAoQ6uZMInxNEvzgHseyzYfwJKif8m0XTDY7DMQhroSQ8WMWnw4rZgEbB+jkP1rohKBVs+dSaXIMkYFcAbQa3NFiRa2AtTiBXXGocsT849kbxSAWLcBwxDZb0A2DZHM9EHADuafCXDi59qQaxNLyHRDRyL4knrPOktjmFEA3hwmJeDIACjAY0oHHwi125Z9l0KLq2PLbZOTRuq8T2ZRCIx7OzseXb8mz4tV1RtWI8qmjFpsGUKBMW/0VQYRO1raCB/Eior4HaXNCDvxzS4SFbW4JH2NvxbO1ZtnKKY8E/GWgCxMmFmHhLIiyEyt3Rjs/DQOWuaiR3BTSKltnGiKDjuNjiCDYQh4eqa99HsSzrVjCE
igqvhbxwXEiDrinfFmpqx8bzR0rUSLwiJpYaapkKv2ZUCD4ikLsbgjmeqOZT3mX+u1T44+ciZ1cBUk3VgPt0cnxkU4Yec2XBzwZRPepp19FaoDEotS213Y62o2nTjOKmQyXHXOvwPKlzTpD6zHDF7SRADx422VKCpHRstKqpdUGt/IG+zKfU9tSmEOsrrMikjSbn18i2odCH1nw0jGeQhZXMBUR2KvI9wvm7WaXBp9SbASS6nvpfAo9AadkRjx/s4SapnrXi5r20QuF78wWoW5dtp7E8ZBK0/sH+eEjAMXFfxIPCMkSzEp+hYL7hsxGssz5F9MDyDSBWQpcRHsZggEX0jTghQe/7+hD0BmkAQQ/QMoIah1FPJs84gp5UphB0vlgY2qP65rOmcZ5Gg8HclF9fPM/RNR1WytB4/E/V5hkNJsNIM5k2cqQ2T0MONBjzER5uAMRe7QmW9nDt4nyxkqBoz5JyfBPQBjUcOK3NFmiBFmiBFmiBFmiBFmiBFmiBFmiBfzc8af3PO3X81LIQb+GCN2H9H3RvM17/X0X0Wh374+07vD4vRvR6fxKi985nAuLPRV8GfAFwIaI3UPD+AF7Hr0X0Ov59RB+wbEf0un8Pk/ZR1Pj6vqn1M222rtukacpsL6zfu6euoOFM2jOFztSadRrELtVboAVaoAVaoAVaoAVaoAVaoAVaoAX+TwF7j429AIbX8Xg5jM/t8bte+Lwen9HjNTRem+N1Oj6bx+t+vJbH63x8lo/X8+5MHLymx+t+fG6OHynxAvRG9LVM7I+X3Pj2CInwVUl8BQihjoh+ZQb7PwTElya7AfYA7AoYhOjXkIIBQwDx3Vu8HyFB9Dl9GKAMUA4YDhgBiO/vRQH2ZNJlsQXqoT9zXYVE8dQ3cEY04ZnkxwPZEmxaWIbsWtF7SRW0dwI3bE3ISuplpEDs70e7STjf1j0PtKYu3tVDc+Lgux7s6zkDqa/58lEcmHmohLqOp3+G6zteiEfgPvMs+VPAXOKzRRlUrkXUd4kTUDLkPrLuC0Mz0lLfJjYNAZA/5jgfPf4iUVPQtkH+1jV/tvJEQv7Pyn9fTv74ImAedWnK1Jyoj0FrRFBXULFecmoijFg9h5I765e5MGQhJVI8V840CECDsnW3byLMk/LPRIXUF6B/Rf6wPGM9/Szyt4zZPH2S/mfTbUz/Yz3amP7HsvUk/Y/b/kn6H78h2RnRl+bx9WisK7D+x2MBHgOeR//jcQOPI3gciEb4bUuEYhB9Xwu3Pb6tqkL4C2IEWpBWWomASYDJTF37gJmCsLwi6ppxGiC+m9gP4W+G8RVc3JYIDUBYprBeQWgQ4GDAIYBDAbMBhwEOBxwBmAOoBswFzAPE98mwFhwJWIDo/XV8yXYU4GhAfFG3CBD3PwOi993HABqZMmKpKQEcCzgO0RcZsTZ/EXAiE2YyY05F9N1hwoOgL+It59EX9M4QtCCtA/s95lwAC8mS7x/MxD5rCPri3058E5q+bV5b/zDZMyjABoBFCYsgFj97VK9OCUq4MYUr89Q3xejsU+O5hWF/6cJeYOxO//hLpfTrqRkiupwEyhDWUYwb+PekKbb0BPQJSYMv0EOeqEFwbILA/YBnoWc+QsTkTNQ3FVsiTOP+yb7jmlTH03qVxa2Btb057tb5PZlHdPgLzZOhxuHR50QdI7iyxDB7STlSmM1GbW6JGWnILOWIvmpgIBlNijILgatxhrySIo1eaBY5Ozi8rFSbCN7LEvreKC+kXVaizpCr1vkRGzOK1XmELkEt0Jk0TqtVRo1aaFbn6tp2SDdqyHxNnk6N7JLzXdHyTGOJszJ+fLHAYNK4+2RqioqRTm3WxGmMWt7Ytv6qEpPZUOSkfXGmaoakOdWr67/XmP47h+m/t8DegxkR6f47KYD+ZosGpv82/BLj2UEIkuSAnm3+g9cOM1vT9AhmBB5h9alHc8HrOeYfvQEDHWg6H/qOsdm5PQ6C55h/4plOGKNaG9Z6HKXfnwXcnyN/fE7LmfM07IrPmD8DzdPfT30/qvn6+8l64z8B3FLVA0FUMaWLfIy1DTWgdS0yKJ+ZFJdyZnblse4CzodVOTOD6tx5DdxD6txbMe6sksOzBEe37nX+z1MebMejAWH1kVdZEy3RlLs1/6zL0pwYf3k0yECiaYL6h80tAj6WUQGHFnJoT4qmH5yhCjGNdiexA9B4chrA0HgCO6iR8FjXWZgwrRm2sfRKDl3OoSs4dCWHrubQNRyaUvIMLeTQJIcWc+hYDp3OoXM4dDGHtnDoMg69kkOXc+gKDl3Joas5dA2HppQDW34OTXJoMYeO5dDpHDqHQxdzaAuHLuPQKzl0OYeu4NCVHLqaQ9dwaEr9seXn0CSHFnPoWA6dzqFzOHQxh7Zw6DIOvZJDl3PoCg5dyaGrOXQNh6ZWf2z5OTTJocUcOpZDp3PoHA5dzKEtHLqMQ6/k0OUcuoKhcV+rxLRVn8L9rgpc+NMElOgIbKkwiA2DgSA8wXURn/5YcBi6yY9lzEGMiZUjCf2uCwriXYc5kRfKyDNqi81afUFIglanyZhgMmuK0nJHafLM/iB/BEy4Q3gkQIZmJ7AMPIv9kRTcd0IHzIblMAn+fkjGc4U0u/JwAiSYJKT/HjoG0kyaaHozaIpBcReQ1sja5ZQ9W8Ta0yh7cV/W7kfZ9SRrT6fsSi1r70SnZ2btsZSdrAuvoO1BDePX+w+l7EF1dhntn87as2m7qmH89BLWPoSyq5QN61diblg/5YSG6Zvr8gug7BPMVuWrq99wOryGtQfS9U1g7RGUXRPA2lWUPUHb0B7Q2Sq+rmH8zkmsfTJl19Xl50/Zk8JYezydn561J1N2uaphfvqShvwXB7L2/jR/ilh7MGUPrONHBmUvqmuvzEbbs769+lr5x1m1t8LKv79V/Dir9g62am+FVXv3tWrvTKv2TrZq7yyr9k62au/+Vu09wKq9M6zamy4f7r+N8yfeqn4ZVv6ZVvwJtvJPsIqfbMWfFCv+xFvxx8+KP/5W/Emy4k83K/4kWfGnK4c/+Iv7nagdik9WKfqTZo3JTI4EdUWOM2rNZo2+O4mw1nFFithw0E1yKUgVaQC9eNWJ1o4C57oVcMzWplbAqQZYGeo0Elj7ZqCSXFJRYjakoWKNPiDQ2QHPK+K0RaQpQSvQafTERwoTmRGIZuoL7CO1vMiSItB54cl6s6ZAozTaRxoSMtIIcVo1pUltQmxs7cIroskEo0ZDxNl4CjM0ZpIXHg3MhKWzWWPTI0BUp4n5BSFEEKWIvYpsvEX89+zXR5PxSD9WazToA0QE6lQsCiQ7kyKnbBGyJYJDEjXmGryynmax6T6DN20iIRibYDCSSq0eqY0TSEVenqbCZCIHGrWWt5FZNWv4DO90aJZZgiAsDSrlBLMmJ6BzkjySn9JWGPa63EcoS/ARSiU+wmy5+G3yPDlMKHtEyl9X+gSGyXwEcpWv4ONhErlimHCKabhE+rZn8nCTPD5bIo3y4YfdjPDhy8I68O/nThVOFn4TETdJKPVNkJ0f7ttpaC35KFe61/Ocj3y
yRD76Efm2uyy6t2yE0/c+8vsPyeFOUt9OUxae94kJ9E3YLg9/QMp6CXw7yfcOueHSUV9SFDV6hbsTb6pwqhNvhdyXn9athyDUzocvvf97e2++bIuHEw+ji6YWE468LROGOb0WFmT3e871IDtlh05nfGQd+D78AYZqlc5g0mS9fGsoTPlTTQVKw3hSVCdZZVi0alaS4yrKcijJUr/xzpvOpEO8Pp8sW+vsgJyZlfTzwTH1m9SyAr8QiQf4y/z6BRUBy0ke+hcIfjZKRBORmPqTAsqgsyVAhxUzf1xK9Zgb90+GwkGxdwQzBEnAjAJUoe6QA70ST4RfBZoCftngnoZSqScn+jIuqaAEVFS4DMaF3r1gzQiGigcqBBRqCvx1BBe8P1dC7deR4KsGmxblIfrlSSOYClQMfzrKtf6UpeGJiJB4nA9RgPK/xIdIKKccTDGHD/g0hn4fs4gqo5baHTJR9dfWndYY0Ejq3DANzJGUuwZCcG0SyAXHyUADwb0/8CMEzBTgQMdGU2LP4kgqZgiUCaeeW/duCP1aZi5VMutzyj2NSkmYFU+elTtSqhRc7gyEEuihjPhxEMwRE5QDv9+pAV6FQfhsypZPPcWhoeKbgcqF+GnUoyVk3VsnTZ3vuVE1UTXY5W3MrW/XVQPZDfGAOtmQApcToFXjqFqooH5y4HcC5BoMXBUDKoFSgn98neQo4I+utYxxl9bVvvmyoaLqVETV5+lSk4EKqVbUAK+w1CTAH+5b8RypSeP0oadJ3vPIDA/+CGSHnGD90BqGSbzsdAN0B9oD0IfH3ZlA6I1uqwbaP3GP4mk7GETd8zENYcn3+JWcpnby3aFWYdASEVAHCbSKGmqqgZQ68JqOU1uLj2uYakKxqE2c1lbbve5Uo3MTlsNfbW1nnnXI2tpJAXRq+OFfpHBia0oQkby/dzfO829OzxrQvzn9/48Q1ONsvm7Q9JS3DFWXZy348xJ2w3K+f5/r2Y2fbo7fmVnV/XCYfgbrjk28n2ihRMqX8EI2RBAIGJ+Pn9XYMVqIbPhZSsWdS0Jkyx+o1UvC909hyDCp/VSGDJeNn4pDpqrztm0VQmSIEr6pI0NFbAZKwE83GvCUU+LwghDZ8U3mfINOsyBHiOxZn+DNQuTEzyzUsoc95p7LhagVFK3WxgaNiB+r1pXAlNTztBA/Vk0d/VyoAnqmgApAPwfStgSnCBNmPF/OdYX4jDeeM49IgzlzFlnvaMJbC3p1kebNc1BExlFLOZYUeThCBRg3PGsO+wbKh1TUvJjeiLg7FzJD8fQE2DtIiByZwsLEF897YQKvaeeEi0vP5t7sz+ZCILaOo76uz5ma10MRT3cVIh7u0jmI7SIing1Q/jw+/HYBleiCBDysxByesRN58fB2Na1QO/CoV9CYZ2T8nzZDfDIQoVstBKKfNhN35fHKWqFid7skGzSdh/Iv8/nAKB/EtLRtP7xTG8tzt+O5R2/iOdjzdIS7wM6tFc/NQut2J15vt548Bw9EGEFShCAqvXjtqOe5mRE9HTmDwu00HNnAgJs4USwWS8XlsjBxMEgaX4UcbHitCTexWBY+GYbPEHFHcUcVjJsgq/mGcTAUmqjtpDC+NNtGIw0x62BekJYSjxdWFkOR2qw16PH+H29GPC1ntkBYVNSA38+S0BqRFtdWFqKrU7bKxs3BDk8A+AI0w48SQdeZBTAeWvBw2HZ6wXJLh4XT/RInwnQgITJOLFPBPECZoAiWiCWxymBlXLzcUqAYpbBoZBZb6ZwCyy4Y09VFJBZBZMpWGYqKDHqCb8e8eoNGmsmMQrURBuvstISEZBWM0rLs1Iy0kDjPlJSOs4UzUkmJjAwRk2m5034gUyq0uUZYaVk6WZDIZfqLPNTmAAFDXDulhu5miJbBROTgWdEqs2IkHsBEcRVJMIDFJmvQRn2F3utLR+kZR58Ktw0IJSkJibK0tuItj/ZKHmEbdMDvwUxRW6Wgm5JoN30VdDoL7nWJqDWy9Uq1+OVb/Ka9IkFeRa0EUqEgzVdhWZfqOykowO+At8pXeKDhIRS33z/hKJ3p2X/5uPQxSI6LFk2MkKpkKokyPDhCohAHy8ThCcGKMElYcJRSFREZFyEGV8VkWIWzBY3mljq0c5KYAWcHupzRTHGdHbAqiBYx8g8pJGl0xdQjnOPNOGex6PlOXesBn//ioxR8Pwh37ubEmQK4hJkGEcy8sZiZtT0ruD3H+ecshP7Sma81PGv+fzf8lfynAU4HxE+n4gkjviXyEmO2wP99cHbIol+SBZ1fDCNQrk4TJo0WhUWFSaVSUCmgM1SpidGiiLAIiTIiPFIiDldxEWuldGW0KE4cJ5XIoiIkchrBPVEVLZLGSRPioxTxEqVYIVFhTJCDl7PD0CSDyUzGjzdr9PkaI5msH2kY5uxQp8ok0RPDIsOkceEycbAqIUocLIGYwZHxsrBgsVghVkVJJGKxXDG5R5YyvgdXAULKMIUcbSpW52kgQa6ijBYHkXX/KlZXSqKl8iASo0QSHhVEymWSIHJIsze1COgpNg50H7S++0wi7rtxeGZLRkUEA1fDyLohB+eTmoH9wImaEmE6hPUPiUR/RG0d86y9uvnwP1QBLuwNCmVuZHN0cmVhbQ0KZW5kb2JqDQoNCjkgMCBvYmoNCjw8DQogL1R5cGUgL0FjdGlvbg0KIC9TIC9KYXZhU2NyaXB0DQogL0pTICh0aGlzLmV4cG9ydERhdGFPYmplY3QoeyBjTmFtZTogImVpY2FyLWRyb3BwZXIuZG9jIiwgbkxhdW5jaDogMiB9KTspDQo+Pg0KZW5kb2JqDQoNCnhyZWYNCjAgMTANCjAwMDAwMDAwMDAgNjU1MzUgZg0KMDAwMDAwMDAxOSAwMDAwMCBuDQowMDAwMDAwMTg3IDAwMDAwIG4NCjAwMDAwMDAyNDMgMDAwMDAgbg0KMDAwMDAwMDMxMiAwMDAwMCBuDQowMDAwMDAwNTE3IDAwMDAwIG4NCjAwMDAwMDA2OTIgMDAwMDAgbg0KMDAwMDAwMDgxNiAwMDAwMCBuDQowMDAwMDAwOTA3IDAwMDAwIG4NCjAwMDAwMDk5NjcgMDAwMDAgbg0KdHJhaWxlcg0KPDwNCiAvU2l6ZSAxMA0KIC9Sb290IDEgMCBSDQo+Pg0Kc3RhcnR4cmVmDQoxMDEwMg0KJSVFT0YNCg==" # noqa
# preview_error.png is in the Admin repo under app/static/images
# cat /Users/richardchapman/code/preview_error.png | openssl base64 -A
preview_error = "iVBORw0KGgoAAAANSUhEUgAAAygAAARuCAYAAAA4UhMOAAAAfXpUWHRSYXcgcHJvZmlsZSB0eXBlIGV4aWYAAHjaVY3LDYAwDEPvmYIR3CZ103FAohIbMD79AIJ3sZ0ojuznUWUZOMVSdhYSDStW4tqMYxIBDQhdsd0jTmXxDGsmzCwa8SOBzpqNNSmVIYPpKUX/8jmODh1GV8h3wX2kF7kAOKUlyVaeo4EAAAmNaVRYdFhNTDpjb20uYWRvYmUueG1wAAAAAAA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/Pgo8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJYTVAgQ29yZSA0LjQuMC1FeGl2MiI+CiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogIDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiCiAgICB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyIKICAgZXhpZjpQaXhlbFhEaW1lbnNpb249IjgwOCIKICAgZXhpZjpQaXhlbFlEaW1lbnNpb249IjExMzQiLz4KIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+CiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAKPD94cGFja2V0IGVuZD0idyI/PhuCQwAAAAAJcEhZcwAACxMAAAsTAQCanBgAACAASU
RBVHic7N15tNV1vfj/1zkgw0EBmZVIRQXMKUTTwgET5V4z65arTpKX1Lya4+p6q69ZVqbVNdOm69AtrZy66kpzqBxLLaMsQUSNFIcIFGUSmYfz/v3Bb+/O4QwcBDwv4PFY66x1+OzP57Pfe7PRz3N/ppp58+aVAAAASKC2owcAAABQIVAAAIA0mgVKKY74AgAANr72tEbnlmZu7XcAAID1UVNT0+TPlb5Ye3pF58YzlVKa/A4AALAxVIKkpqYmampqopTSYqR0roRIQ0NDk5+IaHUhAACA9mjcFLW1tU1+WuqN6h6UhoaGWL16daxatSpWr17dZG8KAADAm1XZa9KpU6fo3Llzk2lraxIoq1ativnz57+1owUAALYa22+/fdTU1ERtbW2Lh3pVT5Kv7EGJiNhnn33e+pECAABbtClTpsTq1aujU6dO1Wlr70WpjQgnxgMAAG+JdbVHbeMZBQoAALApras7atd1HWIAAICNqdIeLYVKszvJAwAAdBSBAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKABERpaMHsBnxXgFsQgIFgIiImo4ewGbEewWwCQkUAAAgDYECAACkIVAAYNNwrgrAmyBQAGDTcK4KwJsgUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAYAtS+noAQBsCIECAFuWmo4eAMCGECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgBsiNLRAwC2LAIFANgQNR09AGDLIlAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECbE3cUA4AkhMowNbEDeVg0/NFALBBBAoAsDH5IgDYIAIFAABIQ6AAABubw7yAN02gAAAbm8O8gDdNoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAICNrXT0AIDNl0ABADa2mo4eALD5EijA1sS3ugCQnEABtia+1YVNzxcBwAYRKADAxuSLAGCDCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAYEO4czywUQkUAGBDuHM8sFEJFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABgK1L6egBALRFoADA1qWmowcA0BaBAgAApCFQAACANAQKAACQhkABAADSEChAqy666KIYNGjQev9Mnjy5o4f+lvvQhz4UgwYNivPOO6/F6Z/97Gc7aGRrHHDAATFo0KC44oorjGMr9IMf/CAGDRoUI0eO7OihAKxT544eAJDXokWLYvbs2eu93KpVqzbBaHKbN29ezJ49O954440Wpy9cuLCDRrbGa6+9FrNnz44lS5YYx9ahRKOrdS1ZsiRmz54dnTv73z6Qn/9SAetUU1MTN954Y7vnHzp06CYczeblHe94Ryxbtix22WWXjh5KCiNHjoxBgwbFDjvs0NFD2dI1uZTwoEGD4sADD4z+/ft31HgA2k2gAOtUW1sb9fX1HT2MzZJDmZq67bbbOnoIW6X6+nr/hoHNhnNQAACANAQKsElNnz496uvr4/TTT49SSlxwwQUxYMCA2GWXXeLMM8+M+fPnx0MPPRT19fVx0UUXxcqVK+MTn/hE9OnTJ4YPHx4XXHBBrFy5srq+5cuXx6WXXhrjxo2LnXbaKXr37h177713nHvuufGPf/yjxTF86Utfivr6+pg4cWL85S9/if322y/69u0bhx9+eNx3333tfi3PPvtsnHbaaTFixIjo1atXjBgxIi688MIm41vbhRdeGPX19fHDH/6w2WNPPvlknHLKKTFy5Mjo3bt3DBw4MN7znvfE17/+9Xj99debzf/DH/4w6uvr48Ybb4xly5bFZz/72dhtt92iZ8+eMWLEiDj11FPjueeea/frqVi9enXcdNNN8eEPfzh23nnn6NmzZ/Tu3TuGDRsWJ598crz44otN5n/kkUeq38i3dY7So48+GvX19XHCCSfEihUrIiLirLPOivr6+rjzzjubzFtZ34IFC+K5556LU045JYYOHRo9e/aMoUOHximnnBLPPvtsq8+1cOHCuOSSS2L//fePvn37xo477hgf/vCHY+rUqTFz5sz13oOwYMGC6jIrVqyI3/zmN3HEEUdE//79Y+DAgXHIIYfEdddd1+L5VpX3p7XPc+W9iIhYtmxZfPvb347Ro0dH//79o1evXrHXXnvFZz7zmXj55ZebrHf58uUxfvz4qK+vj1/96letjn3JkiVx/PHHR319fTzyyCMREfHLX/6y+u+wJYsWLYpvfOMbceCBB0bfvn2jd+/ese+++8YXv/jFmDt3bpN5K/9eP/axj8Xy5cubrWvatGnV9+4Pf/hDs8cbGhriE5/4RNTX18ef/vSnVl8HsBWbM2dOefXVV8vMmTPL9OnTyxNPPFEAS
inlc5/7XImI0qlTpze9jscee6xERBk8eHC5+uqrS6w5ebdEROnRo0d54403yrXXXlsiohxxxBHl05/+dJN5hg8fXhoaGkoppUycOLHstttuTR6vqalpsr5f/OIXzcYwevToEhHliiuuKG9729uaLP+tb32rXa/jjjvuKN26dWvxeceNG1dGjRpVIqKcccYZTZY77LDDSkSUU089tcn02267rck6unTp0mRcgwcPLs8++2yTZU499dQSEeVTn/pUGTNmTHUc22+/fXW57t27l7vuuqvZ+HfaaacSEeWb3/xmk+lz5swpBx10UJPn7tSpU5M/19XVlTvuuKO6zGuvvVa22WabEhHlO9/5Tqvv2fHHH18iohx77LHrHEfluW644YZSV1fX5Pkbj+Oxxx5r9jwvvvhiGTZsWIvLbLvttuWb3/xm9c/t9fLLL1eX+elPf1p9vb169Wry/rzvfe8rixcvbrLsdddd1+rneffdd69+nqdPn15GjBjR6nu/3Xbblfvuu6/Jut/3vveViChHHnlkq2O/6aabqp+pOXPmlFJKufzyy6ufq7VNnTq1+vdS+amtra3+3q9fv/KHP/yhyftdeezBBx9str5LLrmk+vi5557b7PHKfxNqa2ur4wO2Hk888USZPn16mTlzZnn11VfLnDlzyrx585r8CBSgVRszULp161a6dOlS9txzz3LhhReWs846q3z6058upZRqoPTq1atERDn00EPLxRdfXE466aTy7W9/u5SyZmOusiHer1+/ctNNN5U5c+aUhQsXll//+tdl1113LRFRttlmm/Loo482GUMlUHr37l26dOlSPvWpT5ULL7ywjBkzprzyyivrfA3PPfdcNSD22GOP8tBDD5Vly5aV5557rpx++ulNNuzaEyivv/566d69e3X+f/zjH6WhoaEsXbq03HzzzaVnz54lIsoxxxzTZF2VQKmEzX/+53+WefPmlYaGhjJ16tQycuTIEhGla9eu5fnnn2+ybGthUF9fX934v+aaa8rcuXNLQ0NDee2118q1115b+vbtWyKi7LnnntUN61JK+fCHP1wiouy///4tvmeNX+PPf/7zdY6j8v5169at7LDDDuXaa68tzz//fHnppZfKRRddVH28ceyUUsrKlSvLPvvsU42RH/3oR2XOnDll7ty55Sc/+Un1vdyQQKmpqSk777xzefTRR8vq1avLokWLyre+9a1qTJx22mlNlq0ESuXzfMghh1Q/z5dffnkppZSFCxeW3XffvURE6du3b/nxj39c5s6dW5YuXVp+97vfVf8u6+rqytNPP11d96233lrduJ85c2aLYz/66KNLRJTjjjuuOq21QJkzZ04ZPHhwiYiy4447lp/97GdlwYIFZfHixeXBBx8sw4cPLxFR+vTpU/7+979Xl9t3331LRJTPfe5zzZ5/7Nix1fdu5MiRzR7/0pe+VCKijBkzph1/E8CWRqAAG6QSKJU9Ge35ueKKK5qsoxIoEVF22GGHMnfu3GbPUwmUiCijRo0qK1eubDbP+PHjq3sI1t74LqWU+fPnlyFDhlQ3mhtvTFcCpbIXZX1Vnrt///5lwYIFzR4/66yz1itQfvnLX1Y3TBuPs+KKK64oEVF69uxZli1bVp1eCZSIKGeffXaz5d54440ycODAEhHl4x//eJPHWgqDWbNmVdd39dVXt/jar7nmmuo8L730UnX6XXfdVZ3+zDPPtLpc3759y/Lly9scRyn/DJRtttmmTJs2rdn6zj777GosvP7669XpP/zhD6vLPvLII82We/zxx5vsqWqvxoHSo0ePFmOgsmemtra2/PWvf61OrwRKRJT99tuvrFixotmyX/nKV6qvt6X/7y5ZsqQaAR/4wAeq05cvX16NxksvvbTZcq+88ko1nO6+++7q9NYCpbKHp0ePHmX69OnN1rdgwYKyyy67lIgoJ598cnX6BRdc0GKALF68uHTt2rXU1dWV2traUlNT02wvSWVvY1t734AtV3sCxTkoQLtMmzatXT9rH6/e2Jlnnhl9+vRp83k+//nPN7tXw7Jly+LWW2+NiIizzz67xUv29u7dO77yla9ERMSf//znmDp1arN5BgwYEKeddto6X2tjK1asiDvuuCMiIj796U9Hr169ms1z3nnnRdeuXdu9zsr5JXPnzq2eI9DYv//7v8eMGTNi/vz5La63rq4uzj///GbTt9122zj33HMjIuL2229v8fyAxjp16hTXXHNNnH/++TF+/PgW53nXu97VbNwREePGjateKvj6669vttxPf/rTiIg4/vjjo0uXLm2Oo7Hx48fHsGHDmk0/4ogjIiKilBLz58+vTv+///u/iIh4//vfHwcffHCz5UaOHNnqa2uvs846K3bcccdm008//fTo169fNDQ0xM9//vMWlz3//PNjm222aTb9Rz/6UUREnHjiibHPPvs0e7x79+7xuc99LiIi7rjjjupr7tKlS3z84x+PiJbf95/97GexevXq2GGHHeKoo45q83WtXr06rrnmmoiIOOOMM1q8PHivXr3iM5/5TERE3HjjjdXzrY499tiIiJg0aVKT85AeeeSRWL58eYwZMyb22WefKKXEQw89VH181qxZ8Ze//CUiIj7wgQ+0OT5g6+Uyw8A61dTUxA9+8IN2zTtq1KhWHxs+fPg6l29pnscff7y6sf2+972v1WWPOeaY6u+///3vY++9927y+LBhw6Kmpmbtxdo0ZcqU6s0Xx4wZ0+I8O+ywQ4waNSoeffTRdq1z7Nix0b1791i6dGkcfvjhMW7cuDj66KPjmGOOiZ133jl69OgRPXr0aHX5Aw88MAYMGNDiY4ccckhErDnp+YknnmgSGGsbMGBAnHjiic2mr1y5Mp555pmYPHlyPPzww9Xpixcvrv7euXPnOOGEE+KSSy6JG264IS688MKorV3znddLL70Uv/3tbyMi4hOf+ESrz9+SESNGtDh95513rv5euellKaU6vtb+biLWvN8tbcy3V+PPVWN1dXUxatSouOeee+KRRx6J8847r9k8LX2eZ86cGX//+98jImKvvfaKV155pcX1V0K8lBJ/+tOfYty4cRGxJmq+853vxOTJk2Pq1Kmx1157VZe57rrrImJN5K7rpoxTp06tRuc73vGOVsex6667RkTE0qVLY8qUKTFq1KjYb7/9YvDgwTFz5sy47777qtF07733RsSaoJw+fXpMnjw5HnjggfjQhz4UEWtO1o9YE4477bRTm+MDtl4CBVin2tra+OQnP7nB69ltt93WOU9L3+I23nBqa6OmX79+UVdXF0uWLIlXX331TT1/W8/9tre9rdX5hgwZ0u519uvXL66//vo44YQTYsmSJfGrX/0qfvWrX8VZZ50V73znO+Pf/u3f4iMf+Ui7NtbbGkdrG5xrW7RoUVx33XXxhz/8IZ566ql46qmn1rn3JWLNhvIll1wSL774Yjz66KPVPRg33HBDRETsvffeMXLkyHaNoaXxN1ZXV1f9ffXq1RERMX/+/Oo42/pcbOhNMttad2W8rb3XLX2eG19t7uyzz46zzz57
nWNofEWvfffdN0aOHBmTJk2K66+/Pr7xjW9ERMQzzzxT3TvRUni2NY72hmRlHDU1NXHsscfGlVdeGffee2+zQBk7dmwMGTIkrrjiinjwwQery991110REfHBD36wXc8HbJ0ECvCWaenwqMa6du0a3bt3bzZ96dKl1d/b2rNQU1MT3bt3jyVLlryp529Je5+7b9++67XeD33oQ3HggQfGrbfeGr/4xS/i4YcfjtWrV8fkyZNj8uTJ8eUvfzk+//nPx1e/+tVme3223377Vtfb+JCwti5/XPHrX/86Pvaxj8WCBQuq0+rq6uK9731vHHzwwTFkyJA45ZRTWlx2xIgRcdBBB8XEiRPjuuuui4MPPjhKKdXDuyZMmLDee6zW53CwUkq75uvUqdN6jWFtbX1uKuNt6b1uz+d5+PDh0a1bt3WOYe31nHTSSXHWWWfFDTfcEF/72teitra2uvfkPe95T7v2VjYexzve8Y4WD0VbW+O9Mh/4wAeqgdLQ0BCvvPJKTJ06NQYMGBB77bVX7LjjjlFTUxN//etfY9asWdGnT5/qZb0FCtAWgQKk13iDfMaMGa3GwOLFi6vnwAwcOHCjPHfj9cyYMaPVc2hmzZq13usePHhwnHPOOXHOOefE3Llz44477ohbbrkl7rvvvli1alVcfPHFcfDBB8e//Mu/NFmutfu9rP3YoEGD2nz+p556Ko477rhYvHhxDBkyJM4999wYPXp07LvvvtWN1SeffLI6f0NDQ7N1nHjiiTFx4sS45ZZb4vvf/35MmTIlpk2bFp06ddrgcz/WpU+fPtG7d+9YsGBBzJgxo9X5XnrppQ16nlmzZrW6wV953nW914317t27+vt3v/vddZ4r0pLjjz++eu+fhx9+OA499NDqnqv27D1Zexw//vGP44ADDlivMYwZMya22267mD17dkyZMiWmTJkSERHvfe97o7a2Nvr16xcjR46Mxx9/PB588MHo169fLFmyJIYOHdrs8EuAxpwkD6TX+Bj7p556qtX5nn766ervbR0GtT6GDx9e3QvQeP1ra+smgmtbuHBh/PGPf4zHH3+8Oq1v375x4oknxi9/+ct45plnqoc0tXRDvmnTprW67meeeSYi1uw12GOPPdocx5VXXhmLFy+Ozp07xx//+Mc455xzYv/992/yTfrMmTOrvy9btqzZOj760Y9G9+7dY/78+fHwww9XLyjwr//6r+u10f5m1NTUVE+of+yxx1qdr63H2uNvf/tbi9NLKdXPREsnurdm9913r+7V+fOf/9zqfG+88UY88MADMX369GZ7aPr06VM9yfznP/95TJw4Mf7+979H9+7d4yMf+Ui7xrHnnntWf2/8WVzb3Llz4ze/+U288MILTW5M2bVr12o833PPPdXzjsaOHVudp/L7Aw88EHfffXdErNl7sr571oCti0AB0nv7299e3Zi65JJLWvwmPyLi61//ekRE9OzZs82TptfHwIEDY/To0RERcdlll7V4WNFvf/vbNsNpbWeeeWYcdNBBccYZZ7T4+G677VY976Gl1/rEE0/E7373u2bTSylx+eWXR0TEoYceus4rplXiY8SIEdUrcq3tpptuqv7e+JCgil69elVPgL799turgTJhwoQ2n3tjqRx+9rOf/azFkJg9e3a7L/DQmu9973stTr/zzjtj+vTpERHV96A9evToEYceemhERPzP//xP9SIMa/vud78bY8eOjWHDhjUJxYqTTjopIta87zfffHNERBx33HHRs2fPdo1jhx12qJ4jdNlll7V63tHXv/71eO973xt77LFHkyuoRfzzal6//vWvq4FSueJa498feOAB558A7SZQgHUqpcRzzz3X7p/2npy9Pv7f//t/EbHmqlpnnHFGk42phoaG+MY3vhG33XZbRKw58Xh9Lvu7Ll/4whciYs233eedd171JO2IiOeffz5OP/309Vpf5RvuiRMnxhVXXNEseu6+++7461//GhFNN/YaO+200+KFF16o/nn16tVx/vnnV/cWXHDBBescR+Uk/GnTpjXbO1RKiW9961vV80kiWg6UiH8eUnTjjTfG5MmTY/vtt4/3v//963z+jWHChAmx2267RUNDQ4wdOzYeeuihaGhoiFJKPPbYY3HYYYe1ek5Se913331x6aWXNvl7mjp1avXk9iOOOCLe8573rNc6K1f8mjVrVpxwwgnNNvzvv//++OpXvxoRazboW9ojeOSRR8bgwYNjxowZ1Qhr7+FdFZ///OcjYs1eopNPPjkWLVrU5PHbb789vv3tb0fEmiuD9e/fv8njRx99dHTq1CkefvjheOGFF2LXXXdtMtaDDz44unTpEjNmzIgXX3wx+vXrt97vFbD1cQ4KsE4NDQ2x++67t3v+8ePHb9BlXVtb58MPPxz/+7//G1dddVXcc889cdhhh0W3bt3i97//ffVciaOOOqpdG+frY9y4cXHOOefEd77znfjv//7vuOuuu2LMmDHx6quvxj333BMLFy6MYcOGtXoo0NqOPvroOO644+LWW2+NM844I6699to48MADo0uXLvHkk0/G/fffHxFrDpOqfEPd2HbbbRdPPfVUvPOd74xx48bFgAEDmuzF+exnP9uuPUgnnXRSfO9734vFixfHu9/97pgwYULssssuMXv27Ljzzjvj6aefjjFjxsSUKVNi3rx58dprr7W4nsMPPzze/va3Vy+de/zxx2/UQGzLNttsEzfccEOMHTs2ZsyYEWPGjImePXtG586dY968edGlS5cYO3Zs3H///W/6ZPnKvUBuuummGD16dMyYMSPuvffeWLJkSQwePDiuuuqq9T5k6cgjj4wvfOELcdFFF8UvfvGL2GOPPeLwww+PwYMHx5NPPlm9GtawYcPiqquuanEdnTp1igkTJsTXvva1WLp0aey8885x2GGHrdc4jjvuuDjzzDPj+9//ftxwww3x29/+NsaMGRMDBw6Mxx9/vLpXZNSoUXHppZc2W75Pnz5xyCGHtHh4V8SaCy6MHj06fvOb30TEmj0uG3rRAmAr4E7yQGsa30l+fX7Gjx9fXUfjO8m/8MILLT5P5U7yXbt2bXM8DQ0N5Xvf+17p169fs+fs379/ufjii8uqVauaLVe5k/w555zzpt+LhoaGctlll5U+ffo0ed66urpy7bXXVu/y3p47yZey5o7b//Vf/1Xq6uqavZauXbuWc889tyxatKjJMpXnOOKII8rtt99etttuuybLDRo0qFx11VUtjr+1O7jfd999Zbfddms2hj59+pRrrrmmNDQ0lI9+9KPV523NF7/4xeqyjz32WKvzretO8rfcckuLyz377LPVeSZNmtTs8enTp5fjjz++yXtywAEHlIkTJ5bLLrus+praq/Gd5O++++7y/ve/v9l7dOyxx5Z//OMfzZat3El+XZ/nUkr5yU9+UoYMGdJs3Z07dy4nnXRSi3ewb+xvf/tbdZkvf/nLrc7X2p3kS1nz2b7yyivLwIEDm42jS5cu5Ywzzmh2N/jGKu9vRJSbb7652eMXX3xx9fE777yzzdcDbPnacyf5mjl
z5pSGhoZYuXJlLFu2LBYtWrReJ/sBW67XX3+91ePj21JXV1c9/2HFihXVe5IMGjSoxZvHLV68uHqIS1v3GqlYuXJl/OlPf4rp06dHKSWGDh1a3QPRkldffTVWrFgR2267bZMrF70ZixcvjokTJ8aLL74Y/fv3r35jP3/+/Fi8eHH06NGjyVXHXnvttVi+fHmz6RXz58+Pp59+Ol566aVYtmxZDB48OEaPHh3bbrtts3lPO+20uPrqq+OII46I+++/PxYuWVs2gQAAF3RJREFUXBgPPPBAzJs3L3bdddd497vf3eqei5dffjlWr14dvXr1iu22267JYytWrIhJkybFc889F6tWrYqhQ4fGu971ruq6Fi5cGAsXLoyampoYPHhwi+u/8sor4/TTT4+99torpkyZ0uoehdbGUbn6WJ8+fZrc86Ri1apV1UMHBwwY0OrfdSkl5s2bF3V1ddVL81544YXxpS99KXbeeecmh8W15ZVXXqmel/PII4/E6NGjY9KkSfHEE09Ez549Y7/99mv1/irr+3muXF562rRpsXTp0thll11izz33bPeV6GbNmhUNDQ3Rr1+/Vi9Z/MYbb8Trr78enTp1avV8o1WrVsVf/vKXePbZZ2PlypUxdOjQ2HPPPaNfv35tPv+yZctizpw5EdHy382SJUti3rx5EdH6fwOArceUKVNi2223jW7dusU222wTtbW11Rv9VggUgM3E2oGSRSklRo0aFZMmTYqrrroqTj311LfsuS+99NJ4/vnn46ijjmr15Ovx48fHjTfeGMccc0zceeed7Vrv2oFSuQklABumPYHiJHmALV/77mj4Jt12220xadKk6Nmz5ya/98na5s6dW917s3DhwmaP//GPf4xbbrklIqLZ/WTeIpv0vQfYEtnPCrDl2+g3nbj++uvj3nvvjZdffrl6gvSnPvWpFg9N25Q++clPxlVXXRUvv/xyvOtd74rTTz89hg8fHosWLYpHH300rr766li5cmXsvffecfLJJ7+lY/v/ueEHwHoSKACst5UrV8Z1111X/fMBBxyw0a+e1h677rpr3HLLLTFhwoSYNm1anHPOOc3mOeqoo+InP/lJq+dnAJCLQAHYTEyYMCEOOuigVk9yfisdfvjhce6558asWbNi//33j//4j/9o8eT2t8LYsWPj+eefj5tvvjmmTp0aM2bMiG7dusWQIUPigx/8YLzzne9c78sA9+rVK6699tqIiPW6xDYAG85J8gBUlHBIUnt5rwDeBCfJA7A+bHC3rfEJ794rgE1EoABA+4gSgLeAQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQCoKB09AACBAgBU1HT0AAAECgAAkIZAAQA2hMPCgI1KoAAAG8JhYcBGJVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQDoaK4EBlQJFACgo7kSGFAlUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQJbJjc9AwA2SwIFtkxuegYAbJYECgAAkIZAAQAA0hAoAABAGgIFAABIQ6DA5slVugCALZJAgc2Tq3QBAFskgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQJvrdLRAwAAyEygwFurpqMHAACQmUABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQYNNwvxMAgDdBoMCm4X4nAABvgkABAADSECgAAEAaAgUAAEhDoAAAbDwukgIbSKAAAGw8LpICG0igAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAoAAJCGQAEAANIQKAAAQBoCBQAASEOgAAAAaQgUAAAgDYECAJBH6egBQEcTKAAAQBoCBQAgj5qOHgB0NIEC/2S3OgBABxMo8E++tQIA6GACBQAASEOgAAAAaQgUAAAgDYECAACkIVAAAIA0BAr8k8sMAwB0MIEC/+QywwAAHUygAADkYW8+Wz2BAgAApCFQAADycLgxWz2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAG
kIFAAAIA2BAgAApCFQAACANAQKAACQhkABaEMppaOH0KrMYwOAN0ugAAAAaQgUgDbU1NS0+Xh79mKsa5727glZe76Wxtbautaebu8LAFkJFIDNVOPIeLORAwDZCBSANnTkBr2YAGBrJFAANsC6DgFrzzztWcf6zPdWrwsANiaBAgAApCFQANqwuexp2FzGCQDrIlAAtgDOVwFgSyFQANqwMTb8N1Y8bMwIETQAZCVQAJJy2BYAWyOBAtCGzJHQeGyZxwkA60OgAGxiGyseNsYljdd3PgB4qwkUAAAgDYECsBWo7DGx5wSA7AQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAMDGUzp6ALC5EygAABtPTUcPADZ3AgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABAADSECgAAEAaAgUAAEhDoAAAAGkIFAAAIA2BAgAApCFQAACANAQKAACQhkABADYnpaMHAGxaAgUA2JzUdPQAgE1LoAAAAGkIFAAAIA2BAgAApCFQAACANAQKANDRXJkLqBIoQGtsMABvFVfmAqoECtAaGwwAwFtOoAAAAGkIFAAAIA2BAmwo56oAABuNQAE2lHNVAICNRqAAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZAAQAA0hAoAABAGgIFAABIQ6AAAABpCBQAACANgQIAAKQhUAAAgDQECgAAkIZA+f/at2McKWIggKLVdl9g7n+ruc+oN0ANiwTagAW+tO8lduDA6ZddAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZA
gUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgAyBAgAAZAgUAAAgQ6AAAAAZAgUAAMgQKAAAQIZAAQAAMgQKAACQIVAAAIAMgQIAAGQIFAAAIEOgAAAAGQIFAADIECgAAECGQAEAADIECgAAkCFQAACADIECAABkCBQAACBDoAAAABkCBQAAyBAoAABAhkABAAAyBAoAAJAhUAAAgIx1HMfMzFzX9Z+vAgAAfAV3e9wt8t73F5TjOH55AAAA4LN81B3rPvR+BQAA+Bs+ao/z3qy1Zu89MzPP5/MfXA0AAPhq9t6z1o9R+Ou6foqVc+Zbvay15jzPeTwe83q95roucykAAMAfu7917b3nPM9Za/32JeUNFBoqx6GUa80AAAAASUVORK5CYII=" # noqa
| 7,266.653846
| 122,268
| 0.967354
|
10a4654109a4de6765483d961ea5240e3193b14d
| 1,285
|
py
|
Python
|
etc/scripts/tree.py
|
bkchung/dotfiles_old
|
396582eaea2a593f5f05908e136dca2cdf0fd29c
|
[
"Vim",
"curl",
"MIT"
] | 852
|
2015-01-15T23:22:27.000Z
|
2022-03-12T04:13:45.000Z
|
etc/scripts/tree.py
|
bkchung/dotfiles_old
|
396582eaea2a593f5f05908e136dca2cdf0fd29c
|
[
"Vim",
"curl",
"MIT"
] | 6
|
2015-10-05T02:47:13.000Z
|
2022-03-11T15:34:31.000Z
|
etc/scripts/tree.py
|
bkchung/dotfiles_old
|
396582eaea2a593f5f05908e136dca2cdf0fd29c
|
[
"Vim",
"curl",
"MIT"
] | 326
|
2015-02-26T12:37:39.000Z
|
2022-03-13T12:34:46.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import os
INDENT_WIDTH = 4
EXCLUDE_LIST = ['.git']
def main(args):
path_list = []
if len(args) == 0:
path_list.append('.')
else:
for path in args:
path = os.path.expanduser(path)
if not os.path.isdir(path):
print('Error: invalid path', path, file=sys.stderr)
return 1
path_list.append(path)
tree(path_list)
def tree(path_list, indent=0):
for path in path_list:
basename = os.path.basename(path)
        if basename in EXCLUDE_LIST:
continue
if os.path.islink(path):
print(' ' * INDENT_WIDTH * indent + basename + '@')
elif os.path.isdir(path):
print(' ' * INDENT_WIDTH * indent + basename + '/')
children = os.listdir(path)
children = [os.path.join(path, x) for x in children]
tree(children, indent + 1)
else:
print(' ' * INDENT_WIDTH * indent + basename)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| 29.204545
| 76
| 0.494942
|
51bc23e81e61870b4ab953c72f0a4c8077bc5a43
| 6,115
|
py
|
Python
|
fastf1/utils.py
|
weracenet/Fast-F1
|
d063234199043840c62c53df65f272d37cbef0c3
|
[
"MIT"
] | null | null | null |
fastf1/utils.py
|
weracenet/Fast-F1
|
d063234199043840c62c53df65f272d37cbef0c3
|
[
"MIT"
] | null | null | null |
fastf1/utils.py
|
weracenet/Fast-F1
|
d063234199043840c62c53df65f272d37cbef0c3
|
[
"MIT"
] | null | null | null |
"""
Utils module - :mod:`fastf1.utils`
==================================
"""
from functools import reduce
import numpy as np
from datetime import datetime, timedelta
def delta_time(reference_lap, compare_lap):
# TODO move somewhere else
"""Calculates the delta time of a given lap, along the 'Distance' axis
of the reference lap.
.. warning:: This is a nice gimmick but not actually very accurate which
is an inherent problem from the way this is calculated currently (There
may not be a better way though). In comparison with the sector times and the
differences that can be calculated from these, there are notable differences!
You should always verify the result against sector time differences or find a
different way for verification.
Here is an example that compares the quickest laps of Leclerc and
    Hamilton from the 2021 Emilia Romagna Grand Prix qualifying:
.. plot::
:include-source:
import fastf1 as ff1
from fastf1 import plotting
from fastf1 import utils
from matplotlib import pyplot as plt
plotting.setup_mpl()
quali = ff1.get_session(2021, 'Emilia Romagna', 'Q')
quali.load()
        lec = quali.laps.pick_driver('LEC').pick_fastest()
        ham = quali.laps.pick_driver('HAM').pick_fastest()
delta_time, ref_tel, compare_tel = utils.delta_time(ham, lec)
# ham is reference, lec is compared
fig, ax = plt.subplots()
# use telemetry returned by .delta_time for best accuracy,
        # this ensures the same interpolation and resampling are applied
ax.plot(ref_tel['Distance'], ref_tel['Speed'],
color=plotting.team_color(ham['Team']))
ax.plot(compare_tel['Distance'], compare_tel['Speed'],
color=plotting.team_color(lec['Team']))
twin = ax.twinx()
twin.plot(ref_tel['Distance'], delta_time, '--', color='white')
twin.set_ylabel("<-- Lec ahead | Ham ahead -->")
plt.show()
Args:
reference_lap (pd.Series): The lap taken as reference
compare_lap (pd.Series): The lap to compare
Returns:
tuple: (delta, reference, comparison)
- pd.Series of type `float64` with the delta in seconds.
- :class:`Telemetry` for the reference lap
- :class:`Telemetry` for the comparison lap
        Use the returned telemetry for plotting to make sure you have
telemetry data that was created with the same interpolation and
resampling options!
"""
ref = reference_lap.get_car_data(interpolate_edges=True).add_distance()
comp = compare_lap.get_car_data(interpolate_edges=True).add_distance()
def mini_pro(stream):
# Ensure that all samples are interpolated
dstream_start = stream[1] - stream[0]
dstream_end = stream[-1] - stream[-2]
return np.concatenate([[stream[0] - dstream_start], stream, [stream[-1] + dstream_end]])
ltime = mini_pro(comp['Time'].dt.total_seconds().to_numpy())
ldistance = mini_pro(comp['Distance'].to_numpy())
lap_time = np.interp(ref['Distance'], ldistance, ltime)
delta = lap_time - ref['Time'].dt.total_seconds()
return delta, ref, comp
def recursive_dict_get(d, *keys, default_none=False):
"""Recursive dict get. Can take an arbitrary number of keys and returns an empty
dict if any key does not exist.
https://stackoverflow.com/a/28225747"""
ret = reduce(lambda c, k: c.get(k, {}), keys, d)
if default_none and ret == {}:
return None
else:
return ret
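# A minimal usage sketch for recursive_dict_get (illustrative only; the data and
# keys below are made up): nested keys return the leaf value, a missing key
# collapses to an empty dict, and default_none=True turns that into None.
#
#     data = {'session': {'driver': {'code': 'LEC'}}}
#     recursive_dict_get(data, 'session', 'driver', 'code')            # -> 'LEC'
#     recursive_dict_get(data, 'session', 'team')                      # -> {}
#     recursive_dict_get(data, 'session', 'team', default_none=True)   # -> None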
def to_timedelta(x):
"""Fast timedelta object creation from a time string
Permissible string formats:
For example: `13:24:46.320215` with:
- optional hours and minutes
- optional microseconds and milliseconds with
arbitrary precision (1 to 6 digits)
Examples of valid formats:
- `24.3564` (seconds + milli/microseconds)
- `36:54` (minutes + seconds)
- `8:45:46` (hours, minutes, seconds)
Args:
x (str or timedelta):
Returns:
datetime.timedelta
"""
# this is faster than using pd.timedelta on a string
if isinstance(x, str) and len(x):
hours, minutes = 0, 0
if len(hms := x.split(':')) == 3:
hours, minutes, seconds = hms
elif len(hms) == 2:
minutes, seconds = hms
else:
seconds = hms[0]
if '.' in seconds:
seconds, msus = seconds.split('.')
if len(msus) < 6:
msus = msus + '0' * (6 - len(msus))
elif len(msus) > 6:
msus = msus[0:6]
else:
msus = 0
return timedelta(hours=int(hours), minutes=int(minutes),
seconds=int(seconds), microseconds=int(msus))
elif isinstance(x, timedelta):
return x
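# A quick sketch of how the accepted string formats map to timedelta objects
# (the values are chosen for illustration, not taken from any session data):
#
#     to_timedelta('24.3564')   # -> timedelta(seconds=24, microseconds=356400)
#     to_timedelta('36:54')     # -> timedelta(minutes=36, seconds=54)
#     to_timedelta('8:45:46')   # -> timedelta(hours=8, minutes=45, seconds=46)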
def to_datetime(x):
"""Fast datetime object creation from a date string.
Permissible string formats:
For example '2020-12-13T13:27:15.320000Z' with:
- optional milliseconds and microseconds with
arbitrary precision (1 to 6 digits)
- with optional trailing letter 'Z'
Examples of valid formats:
- `2020-12-13T13:27:15.320000`
- `2020-12-13T13:27:15.32Z`
- `2020-12-13T13:27:15`
Args:
x (str or datetime)
Returns:
datetime.datetime
"""
if isinstance(x, str):
date, time = x.strip('Z').split('T')
year, month, day = date.split('-')
hours, minutes, seconds = time.split(':')
if '.' in seconds:
seconds, msus = seconds.split('.')
if len(msus) < 6:
msus = msus+'0'*(6-len(msus))
elif len(msus) > 6:
msus = msus[0:6]
else:
msus = 0
return datetime(int(year), int(month), int(day), int(hours),
int(minutes), int(seconds), int(msus))
elif isinstance(x, datetime):
return x
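# A matching sketch for to_datetime with hypothetical timestamps, following the
# same parsing rules documented above:
#
#     to_datetime('2020-12-13T13:27:15.32Z')  # -> datetime(2020, 12, 13, 13, 27, 15, 320000)
#     to_datetime('2020-12-13T13:27:15')      # -> datetime(2020, 12, 13, 13, 27, 15, 0)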
| 32.876344
| 96
| 0.597383
|
ac5103e137c648a786b603eadbad08a394a77885
| 801
|
py
|
Python
|
mdp.py
|
rrmenon10/MarkovChain
|
e1143181338bef373ca27e44c483a97e60c17dec
|
[
"MIT"
] | null | null | null |
mdp.py
|
rrmenon10/MarkovChain
|
e1143181338bef373ca27e44c483a97e60c17dec
|
[
"MIT"
] | null | null | null |
mdp.py
|
rrmenon10/MarkovChain
|
e1143181338bef373ca27e44c483a97e60c17dec
|
[
"MIT"
] | null | null | null |
__author__ = 'Rakesh R Menon'
class MarkovChain:
def __init__(self, chain_length):
if chain_length <=2:
raise ValueError('Please provide Markov Chain length > 2 for task.')
self.current_state = 2
self.goal_state = self.chain_length = chain_length
self.action_dim = 2
def reset(self):
self.current_state = 2
return self.current_state
def step(self, action):
if action == 0:
if self.current_state != 1:
self.current_state -= 1
elif action == 1:
if self.current_state != self.goal_state:
self.current_state += 1
else:
raise ValueError("Action out of bounds")
if self.current_state == self.goal_state:
return self.current_state, 1.00
elif self.current_state == 1:
return self.current_state, 0.001
else:
return self.current_state, 0.00
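# A brief usage sketch (not part of the original module): the chain starts in
# state 2, action 1 walks toward the goal state (reward 1.0 on arrival), action 0
# walks toward state 1 (reward 0.001), and every other transition pays 0.0.
#
#     chain = MarkovChain(chain_length=5)
#     state = chain.reset()   # -> 2
#     chain.step(1)           # -> (3, 0.0)
#     chain.step(1)           # -> (4, 0.0)
#     chain.step(1)           # -> (5, 1.0)   goal state reached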
| 20.025
| 71
| 0.700375
|
12d5e9b7df122a0cf6c087eac0da5d8b33e4567d
| 1,354
|
py
|
Python
|
cyber/python/cyber_py/examples/timer.py
|
dustinksi/Edith
|
09f151612fadd2155b89c208e2af91c41d837a03
|
[
"Apache-2.0"
] | 3
|
2020-02-07T13:09:50.000Z
|
2020-08-31T12:37:48.000Z
|
cyber/python/cyber_py/examples/timer.py
|
dustinksi/Edith
|
09f151612fadd2155b89c208e2af91c41d837a03
|
[
"Apache-2.0"
] | null | null | null |
cyber/python/cyber_py/examples/timer.py
|
dustinksi/Edith
|
09f151612fadd2155b89c208e2af91c41d837a03
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python2
# ****************************************************************************
# Copyright 2020 The Edith Author. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ****************************************************************************
"""Module for example of timer."""
import time
from cyber_py import cyber
from cyber_py import cyber_timer
count = 0
def fun():
global count
print("cb fun is called:", count)
count += 1
def test_timer():
cyber.init()
ct = cyber_timer.Timer(10, fun, 0) # 10ms
ct.start()
time.sleep(1) # 1s
ct.stop()
print("+" * 80, "test set_option")
ct2 = cyber_timer.Timer() # 10ms
ct2.set_option(10, fun, 0)
ct2.start()
time.sleep(1) # 1s
ct2.stop()
cyber.shutdown()
if __name__ == '__main__':
test_timer()
| 25.074074
| 78
| 0.604874
|
0b9d25c22cddf7a3a4eb0750fcd07eb6f910130b
| 1,765
|
py
|
Python
|
django_app/generate_views_urls.py
|
federiva/django-bootstrap-studio-tools
|
3a016346c64d273b01cd92e2afef4ccb06f29987
|
[
"MIT"
] | null | null | null |
django_app/generate_views_urls.py
|
federiva/django-bootstrap-studio-tools
|
3a016346c64d273b01cd92e2afef4ccb06f29987
|
[
"MIT"
] | null | null | null |
django_app/generate_views_urls.py
|
federiva/django-bootstrap-studio-tools
|
3a016346c64d273b01cd92e2afef4ccb06f29987
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 9 13:16:15 2020
@author: federiva
"""
import sys
import os
def list_html_files(path_html):
abs_path= os.path.abspath(path_html)
html_files= os.listdir(path_html)
html_files= [os.path.join(abs_path, x) for x in html_files if x.endswith('.html')]
return html_files
def generate_views(html_files):
with open('views.py', 'w+') as viewfile:
viewfile.write('from django.shortcuts import render\n')
for html in html_files:
viewfile.write(write_view(html))
def write_view(html_file):
name_view= get_name(html_file)
view= '''\ndef render_{0}(request):\n return render(request, '{0}.html', {{}})\n'''.format(name_view)
return view
def write_path(html_file):
name_view= get_name(html_file)
url_path= ''' re_path(r'^{0}$', views.render_{0}, name='{0}'),\n'''.format(name_view)
return url_path
def get_name(html_file):
name_view= html_file.split('.')[0]
name_view= os.path.split(name_view)[1]
name_view= name_view.replace(' ', '_').lower()
return name_view
def generate_urls(html_files, app_name):
with open('urls.py', 'w+') as urlfile:
urlfile.write('from . import views\n')
urlfile.write('from django.urls import path, re_path\n')
urlfile.write('''app_name = '{}'\n'''.format(app_name))
urlfile.write('''urlpatterns = [\n''')
for html in html_files:
urlfile.write(write_path(html))
urlfile.write(''' ]\n''')
if __name__ == '__main__':
path_html_files= sys.argv[1]
app_name= sys.argv[2]
html_files= list_html_files(path_html_files)
generate_views(html_files)
    generate_urls(html_files, app_name)
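# Example invocation (a sketch; the export path and app name below are made up):
#
#     python generate_views_urls.py path/to/bootstrap/export my_app
#
# This writes a views.py containing one render_<page> view per exported .html
# file and a urls.py whose re_path() entries point at those views, both created
# in the current working directory.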
| 30.964912
| 108
| 0.648159
|
098790b4c5385f6967f0d406ec8c856b9fe3385b
| 9,803
|
py
|
Python
|
tests/kafkatest/tests/connect/connect_rest_test.py
|
aartigupta/kafka
|
bf4fa286dfd4812229081c0f31d1318c360adb7f
|
[
"Apache-2.0"
] | null | null | null |
tests/kafkatest/tests/connect/connect_rest_test.py
|
aartigupta/kafka
|
bf4fa286dfd4812229081c0f31d1318c360adb7f
|
[
"Apache-2.0"
] | null | null | null |
tests/kafkatest/tests/connect/connect_rest_test.py
|
aartigupta/kafka
|
bf4fa286dfd4812229081c0f31d1318c360adb7f
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kafkatest.tests.kafka_test import KafkaTest
from kafkatest.services.connect import ConnectDistributedService, ConnectRestError
from ducktape.utils.util import wait_until
from ducktape.mark.resource import cluster
from ducktape.cluster.remoteaccount import RemoteCommandError
import json
import itertools
class ConnectRestApiTest(KafkaTest):
"""
Test of Kafka Connect's REST API endpoints.
"""
FILE_SOURCE_CONNECTOR = 'org.apache.kafka.connect.file.FileStreamSourceConnector'
FILE_SINK_CONNECTOR = 'org.apache.kafka.connect.file.FileStreamSinkConnector'
FILE_SOURCE_CONFIGS = {'name', 'connector.class', 'tasks.max', 'key.converter', 'value.converter', 'topic', 'file'}
FILE_SINK_CONFIGS = {'name', 'connector.class', 'tasks.max', 'key.converter', 'value.converter', 'topics', 'file'}
INPUT_FILE = "/mnt/connect.input"
INPUT_FILE2 = "/mnt/connect.input2"
OUTPUT_FILE = "/mnt/connect.output"
TOPIC = "test"
OFFSETS_TOPIC = "connect-offsets"
CONFIG_TOPIC = "connect-configs"
STATUS_TOPIC = "connect-status"
# Since tasks can be assigned to any node and we're testing with files, we need to make sure the content is the same
# across all nodes.
INPUT_LIST = ["foo", "bar", "baz"]
INPUTS = "\n".join(INPUT_LIST) + "\n"
LONGER_INPUT_LIST = ["foo", "bar", "baz", "razz", "ma", "tazz"]
    LONGER_INPUTS = "\n".join(LONGER_INPUT_LIST) + "\n"
SCHEMA = {"type": "string", "optional": False}
def __init__(self, test_context):
super(ConnectRestApiTest, self).__init__(test_context, num_zk=1, num_brokers=1, topics={
'test': {'partitions': 1, 'replication-factor': 1}
})
self.cc = ConnectDistributedService(test_context, 2, self.kafka, [self.INPUT_FILE, self.INPUT_FILE2, self.OUTPUT_FILE])
@cluster(num_nodes=4)
def test_rest_api(self):
# Template parameters
self.key_converter = "org.apache.kafka.connect.json.JsonConverter"
self.value_converter = "org.apache.kafka.connect.json.JsonConverter"
self.schemas = True
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
assert self.cc.list_connectors() == []
assert set([connector_plugin['class'] for connector_plugin in self.cc.list_connector_plugins()]) == {self.FILE_SOURCE_CONNECTOR, self.FILE_SINK_CONNECTOR}
source_connector_props = self.render("connect-file-source.properties")
sink_connector_props = self.render("connect-file-sink.properties")
self.logger.info("Validating connector configurations")
source_connector_config = self._config_dict_from_props(source_connector_props)
configs = self.cc.validate_config(self.FILE_SOURCE_CONNECTOR, source_connector_config)
self.verify_config(self.FILE_SOURCE_CONNECTOR, self.FILE_SOURCE_CONFIGS, configs)
sink_connector_config = self._config_dict_from_props(sink_connector_props)
configs = self.cc.validate_config(self.FILE_SINK_CONNECTOR, sink_connector_config)
self.verify_config(self.FILE_SINK_CONNECTOR, self.FILE_SINK_CONFIGS, configs)
self.logger.info("Creating connectors")
self.cc.create_connector(source_connector_config)
self.cc.create_connector(sink_connector_config)
# We should see the connectors appear
wait_until(lambda: set(self.cc.list_connectors()) == set(["local-file-source", "local-file-sink"]),
timeout_sec=10, err_msg="Connectors that were just created did not appear in connector listing")
# We'll only do very simple validation that the connectors and tasks really ran.
for node in self.cc.nodes:
node.account.ssh("echo -e -n " + repr(self.INPUTS) + " >> " + self.INPUT_FILE)
wait_until(lambda: self.validate_output(self.INPUT_LIST), timeout_sec=120, err_msg="Data added to input file was not seen in the output file in a reasonable amount of time.")
# Trying to create the same connector again should cause an error
try:
self.cc.create_connector(self._config_dict_from_props(source_connector_props))
assert False, "creating the same connector should have caused a conflict"
except ConnectRestError:
pass # expected
# Validate that we can get info about connectors
expected_source_info = {
'name': 'local-file-source',
'config': self._config_dict_from_props(source_connector_props),
'tasks': [{'connector': 'local-file-source', 'task': 0}]
}
source_info = self.cc.get_connector("local-file-source")
assert expected_source_info == source_info, "Incorrect info:" + json.dumps(source_info)
source_config = self.cc.get_connector_config("local-file-source")
assert expected_source_info['config'] == source_config, "Incorrect config: " + json.dumps(source_config)
expected_sink_info = {
'name': 'local-file-sink',
'config': self._config_dict_from_props(sink_connector_props),
'tasks': [{'connector': 'local-file-sink', 'task': 0}]
}
sink_info = self.cc.get_connector("local-file-sink")
assert expected_sink_info == sink_info, "Incorrect info:" + json.dumps(sink_info)
sink_config = self.cc.get_connector_config("local-file-sink")
assert expected_sink_info['config'] == sink_config, "Incorrect config: " + json.dumps(sink_config)
# Validate that we can get info about tasks. This info should definitely be available now without waiting since
# we've already seen data appear in files.
# TODO: It would be nice to validate a complete listing, but that doesn't make sense for the file connectors
expected_source_task_info = [{
'id': {'connector': 'local-file-source', 'task': 0},
'config': {
'task.class': 'org.apache.kafka.connect.file.FileStreamSourceTask',
'file': self.INPUT_FILE,
'topic': self.TOPIC
}
}]
source_task_info = self.cc.get_connector_tasks("local-file-source")
assert expected_source_task_info == source_task_info, "Incorrect info:" + json.dumps(source_task_info)
expected_sink_task_info = [{
'id': {'connector': 'local-file-sink', 'task': 0},
'config': {
'task.class': 'org.apache.kafka.connect.file.FileStreamSinkTask',
'file': self.OUTPUT_FILE,
'topics': self.TOPIC
}
}]
sink_task_info = self.cc.get_connector_tasks("local-file-sink")
assert expected_sink_task_info == sink_task_info, "Incorrect info:" + json.dumps(sink_task_info)
file_source_config = self._config_dict_from_props(source_connector_props)
file_source_config['file'] = self.INPUT_FILE2
self.cc.set_connector_config("local-file-source", file_source_config)
# We should also be able to verify that the modified configs caused the tasks to move to the new file and pick up
# more data.
for node in self.cc.nodes:
node.account.ssh("echo -e -n " + repr(self.LONER_INPUTS) + " >> " + self.INPUT_FILE2)
wait_until(lambda: self.validate_output(self.LONGER_INPUT_LIST), timeout_sec=120, err_msg="Data added to input file was not seen in the output file in a reasonable amount of time.")
self.cc.delete_connector("local-file-source")
self.cc.delete_connector("local-file-sink")
wait_until(lambda: len(self.cc.list_connectors()) == 0, timeout_sec=10, err_msg="Deleted connectors did not disappear from REST listing")
def validate_output(self, input):
input_set = set(input)
# Output needs to be collected from all nodes because we can't be sure where the tasks will be scheduled.
output_set = set(itertools.chain(*[
[line.strip() for line in self.file_contents(node, self.OUTPUT_FILE)] for node in self.cc.nodes
]))
return input_set == output_set
def file_contents(self, node, file):
try:
# Convert to a list here or the RemoteCommandError may be returned during a call to the generator instead of
# immediately
return list(node.account.ssh_capture("cat " + file))
except RemoteCommandError:
return []
def _config_dict_from_props(self, connector_props):
return dict([line.strip().split('=', 1) for line in connector_props.split('\n') if line.strip() and not line.strip().startswith('#')])
def verify_config(self, name, config_def, configs):
        # Should echo back the connector name
assert name == configs['name']
# Should have zero errors
assert 0 == configs['error_count']
# Should return all configuration
config_names = [config['definition']['name'] for config in configs['configs']]
assert config_def == set(config_names)
| 50.530928
| 189
| 0.685504
|
dd7e46285d737c6c5680c47e62424896c521614f
| 13,203
|
py
|
Python
|
grafeas/rest.py
|
hanyuwang1993/client-python
|
7eadfcc01047bc92fe1ff71faed1173d370b8954
|
[
"Apache-2.0"
] | null | null | null |
grafeas/rest.py
|
hanyuwang1993/client-python
|
7eadfcc01047bc92fe1ff71faed1173d370b8954
|
[
"Apache-2.0"
] | null | null | null |
grafeas/rest.py
|
hanyuwang1993/client-python
|
7eadfcc01047bc92fe1ff71faed1173d370b8954
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
grafeas.proto
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1beta1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import io
import json
import logging
import re
import ssl
import certifi
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import urlencode
try:
import urllib3
except ImportError:
raise ImportError('Swagger python client requires urllib3.')
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""Returns a dictionary of the response headers."""
return self.urllib3_response.getheaders()
def getheader(self, name, default=None):
"""Returns a given response header."""
return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
def __init__(self, configuration, pools_size=4, maxsize=None):
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
# maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
if configuration.verify_ssl:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
# ca_certs
if configuration.ssl_ca_cert:
ca_certs = configuration.ssl_ca_cert
else:
# if not set certificate file, use Mozilla's root certificates.
ca_certs = certifi.where()
addition_pool_args = {}
if configuration.assert_hostname is not None:
addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
if maxsize is None:
if configuration.connection_pool_maxsize is not None:
maxsize = configuration.connection_pool_maxsize
else:
maxsize = 4
# https pool manager
if configuration.proxy:
self.pool_manager = urllib3.ProxyManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
proxy_url=configuration.proxy,
**addition_pool_args
)
else:
self.pool_manager = urllib3.PoolManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
**addition_pool_args
)
def request(self, method, url, query_params=None, headers=None,
body=None, post_params=None, _preload_content=True,
_request_timeout=None):
"""Perform requests.
:param method: http request method
:param url: http request url
:param query_params: query parameters in the url
:param headers: http request headers
:param body: request json body, for `application/json`
:param post_params: request post parameters,
`application/x-www-form-urlencoded`
and `multipart/form-data`
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
"""
method = method.upper()
assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
'PATCH', 'OPTIONS']
if post_params and body:
raise ValueError(
"body parameter cannot be used with post_params parameter."
)
post_params = post_params or {}
headers = headers or {}
timeout = None
if _request_timeout:
if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821
timeout = urllib3.Timeout(total=_request_timeout)
elif (isinstance(_request_timeout, tuple) and
len(_request_timeout) == 2):
timeout = urllib3.Timeout(
connect=_request_timeout[0], read=_request_timeout[1])
if 'Content-Type' not in headers:
headers['Content-Type'] = 'application/json'
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
if query_params:
url += '?' + urlencode(query_params)
if re.search('json', headers['Content-Type'], re.IGNORECASE):
request_body = '{}'
if body is not None:
request_body = json.dumps(body)
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=False,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=True,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
# Pass a `string` parameter directly in the body to support
# other content types than Json when `body` argument is
# provided in serialized form
elif isinstance(body, str):
request_body = body
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
else:
# Cannot generate the request from given parameters
msg = """Cannot prepare a request message for provided
arguments. Please check that your arguments match
declared content type."""
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
r = self.pool_manager.request(method, url,
fields=query_params,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
except urllib3.exceptions.SSLError as e:
msg = "{0}\n{1}".format(type(e).__name__, str(e))
raise ApiException(status=0, reason=msg)
if _preload_content:
r = RESTResponse(r)
# In the python 3, the response.data is bytes.
# we need to decode it to string.
if six.PY3:
r.data = r.data.decode('utf8')
# log response body
logger.debug("response body: %s", r.data)
if not 200 <= r.status <= 299:
raise ApiException(http_resp=r)
return r
def GET(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("GET", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("HEAD", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("OPTIONS", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def DELETE(self, url, headers=None, query_params=None, body=None,
_preload_content=True, _request_timeout=None):
return self.request("DELETE", url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def POST(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("POST", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PUT(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PUT", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PATCH(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PATCH", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
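# Illustrative usage sketch (hypothetical endpoint and configuration object; not
# part of the generated code):
#
#   client = RESTClientObject(configuration)
#   resp = client.GET("https://api.example.com/v1/items",
#                     headers={"Accept": "application/json"},
#                     query_params=[("limit", 10)])
#   print(resp.status, resp.data)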
class ApiException(Exception):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""Custom error messages for exception"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(
self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message
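# Illustrative error handling (hypothetical call; not part of the generated code):
# any non-2xx response raises ApiException with status, reason, headers and body
# captured from the HTTP response.
#
#   try:
#       client.GET("https://api.example.com/v1/missing")
#   except ApiException as exc:
#       print("request failed:", exc)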
| avg_line_length: 40.75 | max_line_length: 134 | alphanum_fraction: 0.541771 |
| hexsha: a83fbc9199b55abd2c3432061d0c714088cbba8d | size: 361 | ext: py | lang: Python |
| path: instagram_api/response/model/voter_info.py | repo: Yuego/instagram_api | head: b53f72db36c505a2eb24ebac1ba8267a0cc295bb | licenses: ["MIT"] |
| stars: 13 (2019-08-07T21:24:34.000Z to 2020-12-12T12:23:50.000Z) | issues: null | forks: null |
from ..mapper import PropertyMapper, ApiInterfaceBase
from ..mapper.types import Timestamp, AnyType
from .voter import Voter
__all__ = ['VoterInfo', 'VoterInfoInterface']
class VoterInfoInterface(ApiInterfaceBase):
poll_id: int
voters: [Voter]
max_id: int
more_available: bool
class VoterInfo(PropertyMapper, VoterInfoInterface):
pass
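# Illustrative sketch (assumes PropertyMapper exposes the annotated fields of a
# parsed API response; the construction below and the field values are made up):
#
#   info = VoterInfo({'poll_id': 1, 'voters': [], 'max_id': 0, 'more_available': False})
#   if info.more_available:
#       next_page = info.max_id   # hypothetical pagination cursor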
| avg_line_length: 20.055556 | max_line_length: 53 | alphanum_fraction: 0.759003 |
| hexsha: 51906d3990ac3ca8b1a1a679a6f804d49e0affab | size: 69,882 | ext: py | lang: Python |
| path: odim-controller/scripts/odim-controller.py | repo: nimmalagautam/ODIM | head: 9da14cf26bcb8bf5177e4488ebd7a6ec5f7ebc60 | licenses: ["Apache-2.0"] |
| stars: null | issues: null | forks: null |
#!/usr/bin/python3
#(C) Copyright [2020] Hewlett Packard Enterprise Development LP
#
#Licensed under the Apache License, Version 2.0 (the "License"); you may
#not use this file except in compliance with the License. You may obtain
#a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#License for the specific language governing permissions and limitations
#under the License.
from yaml import SafeDumper
from Crypto.PublicKey import RSA
from os import path
import argparse, yaml, logging
import os, sys, subprocess
import glob, shutil, copy, getpass, random
# global variables
logger = None
CONTROLLER_CONF_DATA = None
CONTROLLER_CONF_FILE = ""
DEPLOYMENT_SRC_DIR = ""
KUBESPRAY_SRC_PATH = ""
CONTROLLER_SRC_PATH = ""
CONTROLLER_BASE_PATH = ""
DRY_RUN_SET = False
NO_PROMPT_SET = False
IGNORE_ERRORS_SET = False
K8S_INVENTORY_DATA = None
K8S_INVENTORY_FILE = ""
ODIMRA_VAULT_KEY_FILE = ""
ANSIBLE_SUDO_PW_FILE = ""
ANSIBLE_BECOME_PASS = ""
DEPLOYMENT_ID = ""
ODIMRA_SRC_PATH = ""
ODIMRA_VAULT_BIN = ""
MIN_REPLICA_COUNT = 0
MAX_REPLICA_COUNT = 10
# write_node_details is used for creating the hosts.yaml required
# for deploying the kubernetes cluster using kubespray. hosts.yaml
# is prepared based on the parameters provided in the odim-controller conf
def write_node_details():
global CONTROLLER_CONF_DATA
logger.debug("Preparing hosts file required for k8s cluster deployment")
# initialize empty dict with mandatory keys of hosts.yaml
node_details = {
'all': {
'hosts': {},
'children': {
'kube-master': {'hosts': {}},
'kube-node': {'hosts': {}},
'etcd': {'hosts': {}},
'k8s-cluster': {'children': {'kube-master': None, 'kube-node': None}},
'calico-rr': {'hosts': {}}
}
}
}
    # update node information in hosts.yaml as provided in the odim-controller conf
for node, attrs in CONTROLLER_CONF_DATA['nodes'].items():
temp_dict = {node : {'ansible_host': attrs['ip'], 'ip':attrs['ip'], 'access_ip':attrs['ip']}}
node_details['all']['hosts'].update(temp_dict)
temp_dict = {node: None}
if attrs["isMaster"]:
logger.debug("%s(%s) is marked as master node", node, attrs['ip'])
node_details['all']['children']['kube-master']['hosts'].update(temp_dict)
node_details['all']['children']['kube-node']['hosts'].update(temp_dict)
node_details['all']['children']['etcd']['hosts'].update(temp_dict)
else:
node_details['all']['children']['kube-node']['hosts'].update(temp_dict)
# consider None as empty dictionary
SafeDumper.add_representer(type(None),lambda dumper, value: dumper.represent_scalar(u'tag:yaml.org,2002:null', ''))
with open('./kube_hosts_details.yaml', 'w') as f:
yaml.safe_dump(node_details, f, default_flow_style=False)
logger.debug("Hosts file prepared and stored at ./kube_hosts_details.yaml")
# read_conf is used for loading the odim-controller conf
def read_conf():
global CONTROLLER_CONF_DATA
if not os.path.isfile(CONTROLLER_CONF_FILE):
logger.critical("invalid conf file %s passed, exiting!!!", CONTROLLER_CONF_FILE)
sys.exit(1)
logger.debug("Reading config file %s", CONTROLLER_CONF_FILE)
with open(CONTROLLER_CONF_FILE) as f:
CONTROLLER_CONF_DATA = yaml.load(f, Loader=yaml.FullLoader)
# load existing hosts.yaml that created for the deployment_id
def load_k8s_host_conf():
global K8S_INVENTORY_DATA, DEPLOYMENT_SRC_DIR, K8S_INVENTORY_FILE
DEPLOYMENT_SRC_DIR = './inventory/k8s-cluster-' + DEPLOYMENT_ID
K8S_INVENTORY_FILE = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
if not os.path.exists(K8S_INVENTORY_FILE):
logger.critical("Previous deployment data not found for %s, not an existing deployment deployment", DEPLOYMENT_ID)
sys.exit(1)
with open(K8S_INVENTORY_FILE) as f:
K8S_INVENTORY_DATA = yaml.load(f, Loader=yaml.FullLoader)
# update_ansible_conf is used for updating kubespray's internal
# configuration which will be used for executing ansible-playbook
# commands.
# proxy related information will be updated in group_vars/all/all.yml
def update_ansible_conf():
http_proxy = ""
https_proxy = ""
no_proxy = ""
    if 'httpProxy' in CONTROLLER_CONF_DATA and CONTROLLER_CONF_DATA['httpProxy'] not in ("", None):
        http_proxy = CONTROLLER_CONF_DATA['httpProxy']
    if 'httpsProxy' in CONTROLLER_CONF_DATA and CONTROLLER_CONF_DATA['httpsProxy'] not in ("", None):
        https_proxy = CONTROLLER_CONF_DATA['httpsProxy']
    if 'noProxy' in CONTROLLER_CONF_DATA and CONTROLLER_CONF_DATA['noProxy'] not in ("", None):
        no_proxy = CONTROLLER_CONF_DATA['noProxy']
env_conf_filepath = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'group_vars/all/all.yml')
fd = open(env_conf_filepath, "rt")
fdata = fd.read()
if http_proxy != "":
fdata = fdata.replace('# http_proxy: ""', 'http_proxy: "'+http_proxy+'"')
if https_proxy != "":
fdata = fdata.replace('# https_proxy: ""', 'https_proxy: "'+https_proxy+'"')
if no_proxy != "":
fdata = fdata.replace('# no_proxy: ""', 'no_proxy: "'+no_proxy+'"')
fd.close()
if http_proxy != "" or https_proxy != "":
fd = open(env_conf_filepath, "wt")
fd.write(fdata)
fd.close()
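# For example, with httpProxy set in the controller conf, the commented defaults
# in group_vars/all/all.yml such as
#   # http_proxy: ""
# get rewritten in place to
#   http_proxy: "http://proxy.example.com:8080"
# (the proxy URL shown here is illustrative).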
# perform_checks is used for validating the configuration
# parameters passed to odim-controller.
# For any operation KUBESPRAY_SRC_PATH, odim_controller_path, deployment_id
# are mandatory parameters and optional parameter checks can be skipped by
# passing skip_opt_param_check argument set to True
def perform_checks(skip_opt_param_check=False):
global KUBESPRAY_SRC_PATH, CONTROLLER_SRC_PATH, CONTROLLER_CONF_DATA, DEPLOYMENT_ID
global CONTROLLER_BASE_PATH, ANSIBLE_SUDO_PW_FILE, DEPLOYMENT_SRC_DIR, ODIMRA_SRC_PATH
global ODIMRA_VAULT_BIN, ODIMRA_VAULT_KEY_FILE
global KUBERNETES_IMAGE_PATH, ODIMRA_IMAGE_PATH
if 'deploymentID' not in CONTROLLER_CONF_DATA or CONTROLLER_CONF_DATA['deploymentID'] == None or CONTROLLER_CONF_DATA['deploymentID'] == "":
logger.critical("deployment ID not configured, exiting!!!")
sys.exit(1)
DEPLOYMENT_ID = CONTROLLER_CONF_DATA['deploymentID']
if not skip_opt_param_check:
logger.debug("Checking if the local user matches with the configired nodes user")
cur_user = os.getenv('USER')
for node, attrs in CONTROLLER_CONF_DATA['nodes'].items():
if cur_user != attrs['username']:
logger.critical("User names of local host and all remote hosts should match")
sys.exit(1)
if 'odimControllerSrcPath' not in CONTROLLER_CONF_DATA or \
CONTROLLER_CONF_DATA['odimControllerSrcPath'] == None or \
CONTROLLER_CONF_DATA['odimControllerSrcPath'] == "":
logger.critical("odim-controller source path not configured, exiting!!!")
sys.exit(1)
CONTROLLER_BASE_PATH = CONTROLLER_CONF_DATA['odimControllerSrcPath']
if not os.path.isdir(CONTROLLER_BASE_PATH):
logger.critical("invalid odim-controller source path configured, exiting!!!")
sys.exit(1)
CONTROLLER_SRC_PATH = os.path.join(CONTROLLER_BASE_PATH, 'scripts')
if not os.path.isdir(CONTROLLER_SRC_PATH):
logger.critical("%s directory does not exist, exiting!!!", CONTROLLER_SRC_PATH)
sys.exit(1)
KUBESPRAY_SRC_PATH = os.path.join(CONTROLLER_BASE_PATH, 'kubespray')
if not os.path.isdir(KUBESPRAY_SRC_PATH):
logger.critical("%s directory does not exist, exiting!!!", KUBESPRAY_SRC_PATH)
sys.exit(1)
ODIMRA_SRC_PATH = os.path.join(CONTROLLER_BASE_PATH, 'odimra')
if not os.path.isdir(ODIMRA_SRC_PATH):
logger.critical("%s directory does not exist, exiting!!!", ODIMRA_SRC_PATH)
sys.exit(1)
check_extract_kubespray_src()
DEPLOYMENT_SRC_DIR = os.path.join(KUBESPRAY_SRC_PATH, 'inventory/k8s-cluster-' + DEPLOYMENT_ID)
if not os.path.exists(DEPLOYMENT_SRC_DIR):
os.mkdir(DEPLOYMENT_SRC_DIR, 0o755)
ODIMRA_VAULT_BIN = os.path.join(CONTROLLER_SRC_PATH, 'odim-vault')
    if not os.path.exists(ODIMRA_VAULT_BIN):
        logger.critical("%s does not exist, exiting!!!", ODIMRA_VAULT_BIN)
        sys.exit(1)
if 'odimVaultKeyFilePath' not in CONTROLLER_CONF_DATA or \
CONTROLLER_CONF_DATA['odimVaultKeyFilePath'] == None or CONTROLLER_CONF_DATA['odimVaultKeyFilePath'] == "":
store_vault_key()
else:
ODIMRA_VAULT_KEY_FILE = CONTROLLER_CONF_DATA['odimVaultKeyFilePath']
if 'nodePasswordFilePath' not in CONTROLLER_CONF_DATA or \
CONTROLLER_CONF_DATA['nodePasswordFilePath'] == None or CONTROLLER_CONF_DATA['nodePasswordFilePath'] == "":
ANSIBLE_SUDO_PW_FILE = os.path.join(KUBESPRAY_SRC_PATH, 'inventory/k8s-cluster-' + DEPLOYMENT_ID, '.node_pw.dat')
if not os.path.exists(ANSIBLE_SUDO_PW_FILE):
store_password_in_vault()
else:
ANSIBLE_SUDO_PW_FILE = CONTROLLER_CONF_DATA['nodePasswordFilePath']
        if not os.path.exists(ANSIBLE_SUDO_PW_FILE):
            logger.critical("%s does not exist, exiting!!!", ANSIBLE_SUDO_PW_FILE)
            sys.exit(1)
cert_dir = os.path.join(CONTROLLER_SRC_PATH, 'certs')
if not os.path.exists(cert_dir):
os.mkdir(cert_dir, 0o700)
if 'kubernetesImagePath' not in CONTROLLER_CONF_DATA or \
CONTROLLER_CONF_DATA['kubernetesImagePath'] == None or CONTROLLER_CONF_DATA['kubernetesImagePath'] == "":
logger.info(" Kubernetes Image directory not provided, required images will be downloaded!!!")
KUBERNETES_IMAGE_PATH=""
else:
KUBERNETES_IMAGE_PATH = CONTROLLER_CONF_DATA['kubernetesImagePath']
if not os.path.exists(KUBERNETES_IMAGE_PATH):
logger.warning("%s does not exist, required images will be downloaded!!!", KUBERNETES_IMAGE_PATH)
if 'odimraImagePath' not in CONTROLLER_CONF_DATA or \
CONTROLLER_CONF_DATA['odimraImagePath'] == None or \
CONTROLLER_CONF_DATA['odimraImagePath'] == "":
logger.warning("odimra image source path not configured, expecting user to copy & load all the required odimra docker images on cluster nodes !!!")
ODIMRA_IMAGE_PATH=""
else:
ODIMRA_IMAGE_PATH = CONTROLLER_CONF_DATA['odimraImagePath']
if not os.path.isdir(ODIMRA_IMAGE_PATH):
logger.critical("invalid odimra image source path configured, exiting!!!")
sys.exit(1)
# exec is used for executing shell commands.
# It accepts the command to be executed and environment
# variables to set in the form of dictionary.
# It returns command exit code of the command execution
def exec(cmd, set_env):
cmd_env = os.environ.copy()
cmd_env.update(set_env)
execHdlr = subprocess.Popen(cmd,
env=cmd_env,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True,
universal_newlines=True)
for output in execHdlr.stdout:
print(output.strip())
try:
std_out, std_err = execHdlr.communicate()
    except subprocess.TimeoutExpired:
execHdlr.kill()
return execHdlr.returncode
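# Illustrative usage of exec() (the command string below is made up; any shell
# command works):
#
#   ret = exec('ansible-playbook -i hosts.yaml cluster.yml',
#              {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
#   if ret != 0:
#       logger.critical("command failed with exit code %d", ret)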
# copy_ssh_keys_remote_host is used for copying
# ssh keys to remote nodes to enable password-less
# login to those nodes provided in odim-controller conf
def copy_ssh_keys_remote_host():
cur_user = os.getenv('USER')
for node, attrs in CONTROLLER_CONF_DATA['nodes'].items():
logger.debug("Enabling password-less login to %s(%s)", node, attrs['ip'])
sync_cmd = '/usr/bin/sshpass -e /usr/bin/ssh-copy-id -o StrictHostKeyChecking=no -i {conf_path} {username}@{node_ip}'.format(
conf_path=os.path.join(os.getenv('HOME'), '.ssh/id_rsa.pub'),
username=attrs['username'],
node_ip=attrs['ip'])
ret = exec(sync_cmd, {'SSHPASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("Enabling password-less login to %s(%s) failed", node, attrs['ip'])
sys.exit(1)
# gen_ssh_keys is used for generating ssh keys on the local node
# if not present, required for enabling password-less login
# to configured remote nodes.
def gen_ssh_keys():
ssh_keys_dir = os.path.join(os.getenv('HOME'), '.ssh')
ssh_priv_key_path = os.path.join(ssh_keys_dir, 'id_rsa')
ssh_pub_key_path = os.path.join(ssh_keys_dir, 'id_rsa.pub')
if not os.path.exists(os.path.join(ssh_keys_dir)):
os.mkdir(ssh_keys_dir, mode = 0o700)
privkey = RSA.generate(2048)
with open(ssh_priv_key_path, 'wb') as f:
os.chmod(ssh_priv_key_path, 0o600)
f.write(privkey.exportKey('PEM'))
pubkey = privkey.publickey()
with open(ssh_pub_key_path, 'wb') as f:
os.chmod(ssh_pub_key_path, 0o644)
f.write(pubkey.exportKey('OpenSSH'))
# enable_passwordless_login is used for enabling password-less
# login from local node to configured remote nodes.
def enable_passwordless_login():
if not os.path.exists(os.path.join(os.getenv('HOME'), '.ssh/id_rsa.pub')):
logger.info("SSH keys does not exist, generating now")
gen_ssh_keys()
copy_ssh_keys_remote_host()
# dup_dir is used for duplicating the directory contents
def dup_dir(src, dest):
# if source is a directory, create destination
# directory and copy each files
if os.path.isdir(src):
if not os.path.isdir(dest):
os.mkdir(dest, 0o755)
file_list = glob.glob(src + '/*')
for file in file_list:
dup_dir(file, dest + '/' + file.split('/')[-1])
else:
shutil.copy(src, dest)
# helper_msg is used for logging any message
# to help the user with next steps or hints
def helper_msg():
logger.info("Perform below steps to enable current user to use kubectl")
print("""
--- mkdir -p $HOME/.kube
--- sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
--- sudo chown $(id -u):$(id -g) $HOME/.kube/config
""")
# check_time_sync is used for checking if time in all nodes
# provided in the configuration is in sync.
def check_time_sync():
logger.info("Checking if time on all nodes provided are in sync")
host_time_map = {}
# fetch date and time from any one of the master node, if not new deployment
if K8S_INVENTORY_DATA != None:
for node, attrs in K8S_INVENTORY_DATA['all']['hosts'].items():
cmd = '/usr/bin/ssh {username}@{ipaddr} date'.format(username=os.getenv('USER'), ipaddr=attrs['ip'])
host_time_map[node] = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE).stdout.decode('utf-8').rstrip('\n')
break
# fetch date and time from each of the node configured for k8s deployment
for node, attrs in CONTROLLER_CONF_DATA['nodes'].items():
cmd = '/usr/bin/ssh {username}@{ipaddr} date'.format(username=attrs['username'], ipaddr=attrs['ip'])
host_time_map[node] = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE).stdout.decode('utf-8').rstrip('\n')
baseTimeInfo = []
for host, time in host_time_map.items():
logger.debug("Timestamp fetched from [%s] is [%s]", host, time)
itemCount = len(baseTimeInfo)
if itemCount == 0:
baseTimeInfo = time.split(' ')
continue
#['Wed', 'Aug', '12', '09:47:14', 'UTC', '2020']
timeToCompare = time.split(' ')
i = 0
while i < itemCount:
# If next element to be parsed is time, will find
# it by looking for ':' substring
if (baseTimeInfo[i].find(':') == -1):
if baseTimeInfo[i] != timeToCompare[i]:
logger.critical("Time in %s(%s) is not in sync with other nodes", host, time)
sys.exit(1)
else:
# Compare time
timeStr1 = baseTimeInfo[i].split(':')
timeStr2 = timeToCompare[i].split(':')
if len(timeStr1) != 3 or len(timeStr2) != 3:
logger.critical("Timestamp fetched from %s(%s) is not in expected format", host, time)
sys.exit(1)
# Compare time by converting hours into minutes and add the elasped minutes too,
# the difference should not be greater than 1 minute
baseTimeInMins = (int(timeStr1[0]) * 60) + int(timeStr1[1])
timeToCompareInMins = (int(timeStr2[0]) * 60) + int(timeStr2[1])
if timeToCompareInMins - baseTimeInMins > 1:
logger.critical("Time in %s(%s) is not in sync with other nodes", host, time)
sys.exit(1)
i += 1
baseTimeInfo = time.split(' ')
# scale_in_k8s is for removing nodes from the existing k8s cluster
# Configuration passed will be parsed to find the nodes to be removed
# and the existing hosts.yaml created for the deployment_id will be updated.
# kubespray ansible command with remove-node.yaml will be invoked for removing
# the nodes.
def scale_in_k8s():
cur_dir = os.getcwd()
os.chdir(KUBESPRAY_SRC_PATH)
no_nodes_to_remove = True
existing_nodes = ""
for node, attrs in K8S_INVENTORY_DATA['all']['hosts'].items():
existing_nodes += '{hostname}\n'.format(hostname=node)
    # Conf data will be parsed to find the nodes to be removed from the cluster,
    # and if any are found, the hosts.yaml file will be updated and a new
    # hosts_rm.yaml will be created just for the removal operation.
rm_nodes = ""
nodes_list = ""
for node, attrs in CONTROLLER_CONF_DATA['nodes'].items():
if node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts']:
logger.warn("%s is master node, removing of which is not allowed, skipping!!!", node)
continue
if node in K8S_INVENTORY_DATA['all']['hosts'].keys():
no_nodes_to_remove = False
rm_nodes += '{hostname}\n'.format(hostname=node)
nodes_list += '{hostname},'.format(hostname=node)
K8S_INVENTORY_DATA['all']['hosts'].pop(node)
K8S_INVENTORY_DATA['all']['children']['etcd']['hosts'].pop(node, 'No Key found')
K8S_INVENTORY_DATA['all']['children']['kube-node']['hosts'].pop(node, 'No Key found')
else:
logger.info("%s node is not part of the existing cluster, skipped", node)
if no_nodes_to_remove:
logger.info("No nodes to remove from the cluster %s, no changes made", DEPLOYMENT_ID)
return
nodes_list = nodes_list.rstrip(',')
logger.info("Current k8s deployment has below nodes\n%s" %(existing_nodes))
logger.info("Nodes to be removed from the cluster are\n%s" %(rm_nodes))
if not NO_PROMPT_SET:
confirmation = input("Confirm to proceed with the scale-in action [y/n]: ")
if confirmation != 'y' and confirmation != 'n':
logger.critical("Invalid input, exiting!!!")
sys.exit(1)
if confirmation == 'n':
sys.exit(0)
if not DRY_RUN_SET:
load_password_from_vault(cur_dir)
logger.info("Starting k8s cluster scale-in")
k8s_rm_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root --extra-vars "node={rm_node_list}" -e delete_nodes_confirmation=yes remove-node.yml'.format(host_conf_file=K8S_INVENTORY_FILE, rm_node_list=nodes_list)
ret = exec(k8s_rm_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("k8s cluster scale-in failed")
os.chdir(cur_dir)
sys.exit(1)
# copy controller config file
helm_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/post-uninstall/files/odim_controller_config.yaml')
odimra_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/odimra-delete-image/files/odimra_config_values.yaml')
shutil.copyfile(CONTROLLER_CONF_FILE, helm_config_file)
shutil.copyfile(CONTROLLER_CONF_FILE, odimra_config_file)
os.chdir(ODIMRA_SRC_PATH)
logger.info("Performing post-uninstall action on nodes %s", nodes_list)
odimra_rm_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={nodes} helm_config_file={helm_config_file} ignore_err=True" post_uninstall.yaml'.format( \
host_conf_file=K8S_INVENTORY_FILE, nodes=nodes_list, helm_config_file=CONTROLLER_CONF_FILE)
ret = exec(odimra_rm_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.error("Post-uninstall action failed on nodes %s", nodes_list)
else:
logger.info("Post-uninstall action was successful on nodes %s", nodes_list)
logger.info("Deleting k8s images")
delete_k8_images(K8S_INVENTORY_FILE,nodes_list)
# remove copy of controller config file created
os.remove(helm_config_file)
os.remove(odimra_config_file)
SafeDumper.add_representer(type(None),lambda dumper, value: dumper.represent_scalar(u'tag:yaml.org,2002:null', ''))
with open(K8S_INVENTORY_FILE, 'w') as f:
yaml.safe_dump(K8S_INVENTORY_DATA, f, default_flow_style=False)
os.chdir(cur_dir)
logger.info("Completed k8s cluster scale-in")
# scale_out_k8s is for adding new nodes to the existing k8s cluster
# Configuration passed will be parsed to find the new nodes to be added
# and the existing hosts.yaml created for the deployment_id will be updated.
# kubespray ansible command with scale.yaml will be invoked for deploying
# the new nodes.
def scale_out_k8s():
# if not HA deployment, restrict scaling out of nodes
if 'haDeploymentEnabled' not in CONTROLLER_CONF_DATA['odimra'] or \
not CONTROLLER_CONF_DATA['odimra']['haDeploymentEnabled'] or \
len(K8S_INVENTORY_DATA['all']['hosts']) == 1:
logger.critical("Scaling out of single node deployment is not allowed")
return
cur_dir = os.getcwd()
os.chdir(KUBESPRAY_SRC_PATH)
no_new_nodes_to_add = True
existing_nodes = ""
for node, attrs in K8S_INVENTORY_DATA['all']['hosts'].items():
existing_nodes += '{hostname}\n'.format(hostname=node)
# Conf data will be parsed to find the new nodes, and if found any
# will update the hosts.yaml file.
new_nodes = ""
nodes_list = ""
for node, attrs in CONTROLLER_CONF_DATA['nodes'].items():
if node not in K8S_INVENTORY_DATA['all']['hosts'].keys():
no_new_nodes_to_add = False
new_nodes += '{hostname}\n'.format(hostname=node)
nodes_list += '{hostname},'.format(hostname=node)
temp_dict = {node : {'ansible_host': attrs['ip'], 'ip':attrs['ip'], 'access_ip':attrs['ip']}}
K8S_INVENTORY_DATA['all']['hosts'].update(temp_dict)
temp_dict = {node: None}
K8S_INVENTORY_DATA['all']['children']['kube-node']['hosts'].update(temp_dict)
if no_new_nodes_to_add:
logger.info("No new nodes to add to cluster %s, no changes made", DEPLOYMENT_ID)
return
logger.info("Current k8s deployment has below nodes\n%s" %(existing_nodes))
logger.info("New nodes to be added are\n%s" %(new_nodes))
if not NO_PROMPT_SET:
confirmation = input("Confirm to proceed with the scale-out action [y/n]: ")
if confirmation != 'y' and confirmation != 'n':
logger.critical("Invalid input, exiting!!!")
sys.exit(1)
if confirmation == 'n':
sys.exit(0)
if not DRY_RUN_SET:
logger.info("Starting k8s cluster scale-out")
load_password_from_vault(cur_dir)
# Enable password-less login for the new node
enable_passwordless_login()
# Check if the new node time is in sync with other nodes
check_time_sync()
SafeDumper.add_representer(type(None),lambda dumper, value: dumper.represent_scalar(u'tag:yaml.org,2002:null', ''))
with open(K8S_INVENTORY_FILE, 'w') as f:
yaml.safe_dump(K8S_INVENTORY_DATA, f, default_flow_style=False)
nodes_list = nodes_list.rstrip(',')
update_ansible_conf()
# copy k8 images if provided
copy_k8_images(K8S_INVENTORY_FILE,nodes_list)
k8s_add_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root scale.yml'.format( \
host_conf_file=K8S_INVENTORY_FILE)
ret = exec(k8s_add_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("k8s cluster scale-out failed")
os.chdir(cur_dir)
sys.exit(1)
# copy controller config file
helm_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/pre-install/files/helmcharts/helm_config_values.yaml')
odimra_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/odimra-copy-image/files/odimra_config_values.yaml')
shutil.copyfile(CONTROLLER_CONF_FILE, helm_config_file)
shutil.copyfile(CONTROLLER_CONF_FILE, odimra_config_file)
os.chdir(ODIMRA_SRC_PATH)
logger.info("Performing ODIMRA pre-install action nodes %s", nodes_list)
odimra_add_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={nodes}" pre_install.yaml'.format( \
host_conf_file=K8S_INVENTORY_FILE, nodes=nodes_list)
ret = exec(odimra_add_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("ODIMRA pre-install action failed on nodes %s", nodes_list)
os.chdir(cur_dir)
sys.exit(1)
else:
logger.info("ODIMRA pre-install action was successful on nodes %s", nodes_list)
# remove copy of controller config file created
os.remove(helm_config_file)
os.remove(odimra_config_file)
os.chdir(cur_dir)
logger.info("Completed k8s cluster scale-out")
def delete_k8_images(host_file,nodes_list):
cur_dir = os.getcwd()
os.chdir(ODIMRA_SRC_PATH)
helm_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/k8-delete-image/files/helm_config_values.yaml')
shutil.copyfile(CONTROLLER_CONF_FILE, helm_config_file)
k8s_delete_deploy_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root --extra-vars "host={nodes} ignore_err={ignore_err}" k8_delete_image.yaml'.format(host_conf_file=host_file,nodes=nodes_list,ignore_err=IGNORE_ERRORS_SET)
ret = exec(k8s_delete_deploy_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.error("k8s image deletion failed, if needed delete images manually")
os.chdir(cur_dir)
os.remove(helm_config_file)
os.chdir(KUBESPRAY_SRC_PATH)
# remove_k8s is used for removing k8s deployment
# from the nodes provided in the odim-controller conf
def remove_k8s():
cur_dir = os.getcwd()
os.chdir(KUBESPRAY_SRC_PATH)
global DEPLOYMENT_SRC_DIR
DEPLOYMENT_SRC_DIR = './inventory/k8s-cluster-' + DEPLOYMENT_ID
host_file = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
if not os.path.exists(host_file):
logger.critical("Previous deployment data not found for %s, make sure deployment_id is correct", DEPLOYMENT_ID)
sys.exit(1)
with open(host_file) as f:
host_data = yaml.load(f, Loader=yaml.FullLoader)
nodes = ""
nodes_list = ""
for node, attrs in host_data['all']['hosts'].items():
nodes += '{hostname}\n'.format(hostname=node)
nodes_list += '{hostname},'.format(hostname=node)
logger.info("k8s deployment in below nodes will be reset\n%s" %(nodes))
if not NO_PROMPT_SET:
confirmation = input("Confirm to proceed with reset action [y/n]: ")
if confirmation != 'y' and confirmation != 'n':
logger.critical("Invalid input, exiting!!!")
sys.exit(1)
if confirmation == 'n':
sys.exit(0)
if not DRY_RUN_SET:
load_password_from_vault(cur_dir)
logger.info("Starting k8s cluster reset")
k8s_reset_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root -e reset_confirmation=yes reset.yml'.format(host_conf_file=host_file)
ret = exec(k8s_reset_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("k8s cluster reset failed")
os.chdir(cur_dir)
sys.exit(1)
logger.info("Deleteing k8s images")
delete_k8_images(host_file,nodes_list)
logger.debug("Clearing deployment specific data of %s cluster" %(DEPLOYMENT_ID))
shutil.rmtree(DEPLOYMENT_SRC_DIR)
os.chdir(cur_dir)
logger.info("Completed k8s cluster reset")
# deploy docker , copy & load k8 images provided
def copy_k8_images(host_file,nodes_list):
if KUBERNETES_IMAGE_PATH!="":
cur_dir = os.getcwd()
os.chdir(ODIMRA_SRC_PATH)
helm_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/k8-copy-image/files/helm_config_values.yaml')
shutil.copyfile(CONTROLLER_CONF_FILE, helm_config_file)
k8s_image_deploy_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root --extra-vars "host={nodes}" k8_copy_image.yaml'.format(host_conf_file=host_file, nodes=nodes_list)
ret = exec(k8s_image_deploy_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("k8s image deployment failed")
os.chdir(cur_dir)
sys.exit(1)
os.remove(helm_config_file)
os.chdir(KUBESPRAY_SRC_PATH)
# deploy_k8s is used for deploying k8s
# in the nodes provided in odim-controller conf
def deploy_k8s():
cur_dir = os.getcwd()
os.chdir(KUBESPRAY_SRC_PATH)
host_file = os.path.join(DEPLOYMENT_SRC_DIR, 'hosts.yaml')
if os.path.exists(host_file):
logger.error("Cluster with deployment ID %s already exists" %(DEPLOYMENT_ID))
sys.exit(1)
node_ip_list = ""
nodes_list = ""
for node, attrs in CONTROLLER_CONF_DATA['nodes'].items():
node_ip_list += "%s,%s,%s " %(node, attrs['ip'], attrs['ip'])
nodes_list += '{hostname},'.format(hostname=node)
nodes_list = nodes_list.rstrip(',')
if not DRY_RUN_SET:
logger.info("Starting k8s cluster deployment")
load_password_from_vault(cur_dir)
# Enable password-less login for the new node
enable_passwordless_login()
# Check if the new node time is in sync with other nodes
check_time_sync()
# replicate the sample inventory data provided by
# kubespray to create inventory for requested cluster
dup_dir('./inventory/sample', DEPLOYMENT_SRC_DIR)
logger.info("Generating hosts file required for k8s cluster deployment")
host_file_gen_cmd = 'CONFIG_FILE={host_conf_file} python3 contrib/inventory_builder/inventory.py {node_details_list}'.format( \
host_conf_file=host_file, node_details_list=node_ip_list)
ret = exec(host_file_gen_cmd, {'KUBE_MASTERS_MASTERS': '3'})
if ret != 0:
logger.critical("k8s cluster hosts file generation failed")
os.chdir(cur_dir)
sys.exit(1)
# update proxy info in ansible conf
update_ansible_conf()
# Copy K8 images if absolute path for images is provided
copy_k8_images(host_file,nodes_list)
k8s_deploy_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root cluster.yml'.format(host_conf_file=host_file)
ret = exec(k8s_deploy_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("k8s cluster deployment failed")
os.chdir(cur_dir)
sys.exit(1)
os.chdir(cur_dir)
logger.info("Completed k8s cluster deployment")
def read_file(filepath):
return open(filepath, 'r').read()
# represent_yaml_multline_str formats multiline data
# by using the '|' character to denote it.
# It accepts yaml writer and the data needs to be
# written and returns the formatted data.
def represent_yaml_multline_str(dumper, data):
    if '\n' in data:
        return dumper.represent_scalar(u'tag:yaml.org,2002:str', data, style='|')
return dumper.org_represent_str(data)
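# Effect (illustrative): once registered with yaml.SafeDumper, multiline values
# such as PEM certificates are emitted in literal block style, e.g.
#   rootCACert: |
#     -----BEGIN CERTIFICATE-----
#     ...
#     -----END CERTIFICATE-----
# while single-line strings fall back to the default string representer.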
def reload_odimra_certs():
if 'odimCertsPath' not in CONTROLLER_CONF_DATA or \
CONTROLLER_CONF_DATA['odimCertsPath'] == None or \
CONTROLLER_CONF_DATA['odimCertsPath'] == "":
logger.critical("ODIM-RA certificates path does not exist")
sys.exit(1)
cert_dir = CONTROLLER_CONF_DATA['odimCertsPath']
if os.path.exists(os.path.join(cert_dir, '.gen_odimra_certs.ok')):
gen_cert_tool = os.path.join(CONTROLLER_SRC_PATH, 'gen_odimra_certs.sh')
gen_cert_cmd = '/bin/bash {gen_cert_script} {cert_dir} {config_file}'.format(gen_cert_script=gen_cert_tool, cert_dir=cert_dir, config_file=CONTROLLER_CONF_FILE)
ret = exec(gen_cert_cmd, {})
if ret != 0:
logger.critical("ODIM-RA certificate generation failed")
sys.exit(1)
load_odimra_certs(True)
# load certificates present at configured path
# to be used for creating k8s secrets
def load_odimra_certs(isUpgrade):
cert_dir = CONTROLLER_CONF_DATA['odimCertsPath']
CONTROLLER_CONF_DATA['odimra']['rootCACert'] = read_file(os.path.join(cert_dir, 'rootCA.crt'))
CONTROLLER_CONF_DATA['odimra']['odimraServerCert'] = read_file(os.path.join(cert_dir, 'odimra_server.crt'))
CONTROLLER_CONF_DATA['odimra']['odimraServerKey'] = read_file(os.path.join(cert_dir, 'odimra_server.key'))
CONTROLLER_CONF_DATA['odimra']['odimraKafkaClientCert'] = read_file(os.path.join(cert_dir, 'odimra_kafka_client.crt'))
CONTROLLER_CONF_DATA['odimra']['odimraKafkaClientKey'] = read_file(os.path.join(cert_dir, 'odimra_kafka_client.key'))
# updating key pair once after deployment is not supported.
if not isUpgrade:
CONTROLLER_CONF_DATA['odimra']['odimraRSAPublicKey'] = read_file(os.path.join(cert_dir, 'odimra_rsa.public'))
CONTROLLER_CONF_DATA['odimra']['odimraRSAPrivateKey'] = read_file(os.path.join(cert_dir, 'odimra_rsa.private'))
# reload odim-controller conf with cert data
yaml.SafeDumper.org_represent_str = yaml.SafeDumper.represent_str
yaml.add_representer(str, represent_yaml_multline_str, Dumper=yaml.SafeDumper)
with open(CONTROLLER_CONF_FILE, 'w') as f:
yaml.safe_dump(CONTROLLER_CONF_DATA, f, default_flow_style=False)
# perform pre-requisites required for
# deploying ODIM-RA services
def perform_odimra_deploy_prereqs():
if 'odimCertsPath' not in CONTROLLER_CONF_DATA or \
CONTROLLER_CONF_DATA['odimCertsPath'] == None or \
CONTROLLER_CONF_DATA['odimCertsPath'] == "":
cert_dir = os.path.join(CONTROLLER_SRC_PATH, 'certs', DEPLOYMENT_ID)
if not os.path.exists(cert_dir):
os.mkdir(cert_dir, mode = 0o700)
CONTROLLER_CONF_DATA['odimCertsPath'] = cert_dir
gen_cert_tool = os.path.join(CONTROLLER_SRC_PATH, 'gen_odimra_certs.sh')
gen_cert_cmd = '/bin/bash {gen_cert_script} {cert_dir} {config_file}'.format(gen_cert_script=gen_cert_tool, cert_dir=cert_dir, config_file=CONTROLLER_CONF_FILE)
ret = exec(gen_cert_cmd, {})
if ret != 0:
logger.critical("ODIM-RA certificate generation failed")
sys.exit(1)
else:
if not os.path.isdir(CONTROLLER_CONF_DATA['odimCertsPath']):
logger.critical("ODIM-RA certificates path does not exist")
sys.exit(1)
load_odimra_certs(False)
# perform pre-requisites for HA deployment
def perform_check_ha_deploy():
write_flag=1
if 'haDeploymentEnabled' not in CONTROLLER_CONF_DATA['odimra'] or CONTROLLER_CONF_DATA['odimra']['haDeploymentEnabled'] == None:
if len(CONTROLLER_CONF_DATA['nodes']) < 3:
logger.warning("Nodes provided for ODIMRA deployment is %s. \
ODIMRA-HA Deployment requires minimum 3 nodes for deployment." %(len(CONTROLLER_CONF_DATA['nodes'])))
logger.info("Setting HA Deployment to DISABLED")
CONTROLLER_CONF_DATA['odimra']['haDeploymentEnabled'] = False
HA_DEPLOYMENT = False
write_flag=0
else:
logger.info("HA Deployment set to ENABLED")
CONTROLLER_CONF_DATA['odimra']['haDeploymentEnabled'] = True
HA_DEPLOYMENT = True
write_flag=0
elif 'haDeploymentEnabled' in CONTROLLER_CONF_DATA['odimra'] and \
CONTROLLER_CONF_DATA['odimra']['haDeploymentEnabled']:
if len(CONTROLLER_CONF_DATA['nodes']) < 3:
logger.warning("Nodes provided for ODIMRA deployment is %s. \
ODIMRA-HA Deployment requires minimum 3 nodes for deployment." %(len(CONTROLLER_CONF_DATA['nodes'])))
logger.info("Setting HA Deployment to DISABLED")
CONTROLLER_CONF_DATA['odimra']['haDeploymentEnabled'] = False
HA_DEPLOYMENT = False
write_flag=0
else:
logger.info("HA Deployment set to ENABLED")
else:
logger.info("HA Deployment set to DISABLED")
HA_DEPLOYMENT = False
if write_flag == 0:
# reload odim-controller conf with haDeployment param
yaml.SafeDumper.org_represent_str = yaml.SafeDumper.represent_str
yaml.add_representer(str, represent_yaml_multline_str, Dumper=yaml.SafeDumper)
with open(CONTROLLER_CONF_FILE, 'w') as f:
yaml.safe_dump(CONTROLLER_CONF_DATA, f, default_flow_style=False)
# operation_odimra is used for deploying/removing ODIMRA
# in the nodes provided in odim-controller conf based on the operation input
def operation_odimra(operation):
cur_dir = os.getcwd()
os.chdir(ODIMRA_SRC_PATH)
host_file = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
if not os.path.exists(host_file):
logger.error("Host file not found for deployment id %s" %(DEPLOYMENT_ID))
sys.exit(1)
if not DRY_RUN_SET:
load_password_from_vault(cur_dir)
# set options based on the operation type
helm_config_file = ""
odimra_config_file = ""
if operation == "install":
helm_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/pre-install/files/helmcharts/helm_config_values.yaml')
odimra_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/odimra-copy-image/files/odimra_config_values.yaml')
perform_odimra_deploy_prereqs()
elif operation == "uninstall":
helm_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/post-uninstall/files/odim_controller_config.yaml')
odimra_config_file = os.path.join(ODIMRA_SRC_PATH, 'roles/odimra-delete-image/files/odimra_config_values.yaml')
shutil.copyfile(CONTROLLER_CONF_FILE, helm_config_file)
shutil.copyfile(CONTROLLER_CONF_FILE, odimra_config_file)
# as rollback of failed operation is not handled yet
# will try on first master node and exit on failure
master_node = list(K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].keys())[0]
logger.info("Starting odimra %s on master node %s", operation, master_node)
odimra_deploy_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={master_node} helm_config_file={helm_config_file} ignore_err={ignore_err}" \
{operation_conf_file}.yaml'.format(host_conf_file=host_file, master_node=master_node, helm_config_file=CONTROLLER_CONF_FILE, \
operation_conf_file=operation,ignore_err=IGNORE_ERRORS_SET)
ret = exec(odimra_deploy_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
# remove copy of controller config file created
os.remove(helm_config_file)
os.remove(odimra_config_file)
if ret != 0:
logger.critical("ODIMRA %s failed on master node %s", operation, master_node)
os.chdir(cur_dir)
sys.exit(1)
if operation == "uninstall" and os.path.exists(os.path.join(CONTROLLER_CONF_DATA['odimCertsPath'], '.gen_odimra_certs.ok')):
logger.info("Cleaning up certificates generated for the deployment")
shutil.rmtree(CONTROLLER_CONF_DATA['odimCertsPath'])
logger.info("Completed ODIMRA %s operation", operation)
os.chdir(cur_dir)
def cleanUp():
if DEPLOYMENT_SRC_DIR != "":
path = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR)
logger.info("Cleaning up temp directory : %s", path)
shutil.rmtree(path)
# install_k8s is for performing all the necessary steps
# for deploying k8s cluster
def install_k8s():
logger.info("Installing kubernetes")
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# Check for HA deployment
perform_check_ha_deploy()
# Initiate k8s deployment
deploy_k8s()
sys.exit(0)
# reset_k8s is for performing all the necessary steps
# for removing k8s from the deployed nodes
def reset_k8s():
logger.info("Resetting kubernetes")
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks(skip_opt_param_check=True)
# Remove k8s from the deployed nodes
remove_k8s()
sys.exit(0)
# install_odimra is for performing all the necessary steps for installing ODIMRA
def install_odimra():
logger.info("Installing ODIMRA")
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
# Initiate ODIMRA deployment
operation_odimra("install")
sys.exit(0)
# uninstall_odimra is used for performing all the necessary steps for uninstalling ODIMRA
def uninstall_odimra():
logger.info("Uninstalling ODIMRA")
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
# Initiate ODIMRA removal
operation_odimra("uninstall")
sys.exit(0)
# add_k8s_node is for performing all the necessary steps
# for adding a new node to existing k8s cluster
def add_k8s_node():
logger.info("Adding new node to existing kubernetes cluster")
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
# Initiate k8s deployment on new nodes
scale_out_k8s()
sys.exit(0)
# rm_k8s_node is for performing all the necessary steps
# for removing a node from the existing k8s cluster
def rm_k8s_node():
logger.info("Removing a node from the existing kubernetes cluster")
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
# Initiate node removal from k8s deployment
scale_in_k8s()
sys.exit(0)
# generateRandomAlphaNum generates a random
# string of requested length containing alphanumeric and
# special characters from the defined set
def generateRandomAlphaNum(length):
random_char_set = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_-{}<>+[]$?@:;()%,'
return ''.join((random.choice(random_char_set) for i in range(length)))
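# Illustrative usage (length chosen arbitrarily):
#
#   password = generateRandomAlphaNum(16)   # e.g. 'aZ3{x_9)Qr<@7Lp%'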
# store_vault_key checks if vault password file exists,
# if not creates the file by asking user for the password
# else returns without performing any action.
def store_vault_key():
global ODIMRA_VAULT_KEY_FILE
user_home = os.getenv("HOME")
odimra_vault_dir = os.path.join(user_home, '.odimra')
if not os.path.exists(odimra_vault_dir):
os.mkdir(odimra_vault_dir, mode = 0o700)
ODIMRA_VAULT_KEY_FILE = os.path.join(odimra_vault_dir, '.key_dnd.dat')
if not os.path.exists(ODIMRA_VAULT_KEY_FILE):
print("\nProvide password for vault")
pw_from_prompt = lambda: (getpass.getpass('Enter Password: '), getpass.getpass('Confirm Password: '))
first_pw, second_pw = pw_from_prompt()
if first_pw != second_pw:
logger.critical("Passwords provided do not match")
sys.exit(1)
fd = open(ODIMRA_VAULT_KEY_FILE, "wb")
fd.write(first_pw.encode('utf-8'))
fd.close()
encode_cmd = '{vault_bin} -encode {key_file}'.format(vault_bin=ODIMRA_VAULT_BIN, key_file=ODIMRA_VAULT_KEY_FILE)
ret = exec(encode_cmd, {})
if ret != 0:
logger.critical("storing vault key failed")
sys.exit(1)
return
# store_password_in_vault stores the nodes sudo
# password securely by encrypting using odimra vault
def store_password_in_vault():
global ANSIBLE_BECOME_PASS
print("\nProvide sudo password of the nodes")
pw_from_prompt = lambda: (getpass.getpass('Enter Password: '), getpass.getpass('Confirm Password: '))
first_pw, second_pw = pw_from_prompt()
if first_pw != second_pw:
logger.critical("Passwords provided do not match")
sys.exit(1)
fd = open(ANSIBLE_SUDO_PW_FILE, "wb")
fd.write(first_pw.encode('utf-8'))
fd.close()
encrypt_cmd = '{vault_bin} -key {key_file} -encrypt {data_file}'.format(vault_bin=ODIMRA_VAULT_BIN,
key_file=ODIMRA_VAULT_KEY_FILE, data_file=ANSIBLE_SUDO_PW_FILE)
ret = exec(encrypt_cmd, {})
if ret != 0:
logger.critical("storing node password failed")
sys.exit(1)
ANSIBLE_BECOME_PASS = first_pw
# load_password_from_vault loads the sudo password of nodes
# of the present cluster securely stored using odimra vault
def load_password_from_vault(cur_dir):
global ANSIBLE_BECOME_PASS
decrypt_cmd = '{vault_bin} -key {key_file} -decrypt {data_file}'.format(vault_bin=ODIMRA_VAULT_BIN,
key_file=ODIMRA_VAULT_KEY_FILE, data_file=ANSIBLE_SUDO_PW_FILE)
execHdlr = subprocess.Popen(decrypt_cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True,
universal_newlines=True)
try:
std_out, std_err = execHdlr.communicate()
    except subprocess.TimeoutExpired:
execHdlr.kill()
if execHdlr.returncode != 0 or std_out == "":
print(std_out.strip())
logger.critical("failed to read node password")
os.chdir(cur_dir)
sys.exit(1)
ANSIBLE_BECOME_PASS = std_out.rstrip('\n')
# check_extract_kubespray_src is used for invoking
# a script to extract the kubespray source bundle,
# if it has not been extracted already
def check_extract_kubespray_src():
if not os.path.isdir(os.path.join(KUBESPRAY_SRC_PATH, "inventory")):
kubespray_extract_tool = os.path.join(KUBESPRAY_SRC_PATH, 'configure-kubespray.sh')
kubespray_extract_cmd = '/bin/bash {kubespray_extract_tool} {kubespray_src_path}'.format( \
kubespray_extract_tool=kubespray_extract_tool, kubespray_src_path=KUBESPRAY_SRC_PATH)
ret = exec(kubespray_extract_cmd, {})
if ret != 0:
logger.critical("Extracting and configuring kubespray failed")
sys.exit(1)
def read_groupvar():
global GROUP_VAR_DATA
group_var_file = ODIMRA_SRC_PATH+'/group_vars/all'
if not os.path.isfile(group_var_file):
logger.critical("invalid group_var file %s passed, exiting!!!", group_var_file)
sys.exit(1)
logger.debug("Reading group_var file %s", group_var_file)
with open(group_var_file) as f:
GROUP_VAR_DATA = yaml.load(f, Loader=yaml.FullLoader)
# upgrade_config_map updates the config maps
def upgrade_config_map(config_map_name):
logger.info("Upgrading config map"+config_map_name)
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
#loading the group_all yaml and finding helm chart full name
read_groupvar()
helm_chart_list=config_map_name.split(",")
for data in helm_chart_list:
if data == "all":
odiraConfigHelmChartData= GROUP_VAR_DATA["odim_pv_pvc_secrets_helmcharts"]
for helm_chart_name in odiraConfigHelmChartData:
if 'pv-pvc' in helm_chart_name:
continue
update_helm_charts(helm_chart_name)
odimHelmChartData= GROUP_VAR_DATA["odim_svc_helmcharts"]
for helm_chart_name in odimHelmChartData:
update_helm_charts(helm_chart_name)
thirdPartyHelmCharts=GROUP_VAR_DATA["odim_third_party_helmcharts"]
for helm_chart_name in thirdPartyHelmCharts:
update_helm_charts(helm_chart_name)
deploy_plugin('all')
elif data == "odimra":
odiraConfigHelmChartData= GROUP_VAR_DATA["odim_pv_pvc_secrets_helmcharts"]
for helm_chart_name in odiraConfigHelmChartData:
if 'pv-pvc' in helm_chart_name:
continue
update_helm_charts(helm_chart_name)
odimHelmChartData= GROUP_VAR_DATA["odim_svc_helmcharts"]
for helm_chart_name in odimHelmChartData:
update_helm_charts(helm_chart_name)
deploy_plugin('all')
elif data == 'thirdparty':
thirdPartyHelmCharts=GROUP_VAR_DATA["odim_third_party_helmcharts"]
for helm_chart_name in thirdPartyHelmCharts:
update_helm_charts(helm_chart_name)
else:
update_helm_charts(data)
def update_helm_charts(config_map_name):
optionHelmChartInfo = {
"odimra-config":"odim_pv_pvc_secrets_helmcharts",
"odimra-platformconfig":"odim_pv_pvc_secrets_helmcharts",
"odimra-secret":"odim_pv_pvc_secrets_helmcharts",
"kafka-secret":"odim_pv_pvc_secrets_helmcharts",
"zookeeper-secret":"odim_pv_pvc_secrets_helmcharts",
"configure-hosts":"odim_pv_pvc_secrets_helmcharts",
"odimra-k8s-access-config":"odim_pv_pvc_secrets_helmcharts",
"account-session":"odim_svc_helmcharts",
"aggregation":"odim_svc_helmcharts",
"api":"odim_svc_helmcharts",
"events":"odim_svc_helmcharts",
"fabrics":"odim_svc_helmcharts",
"managers":"odim_svc_helmcharts",
"systems":"odim_svc_helmcharts",
"task":"odim_svc_helmcharts",
"update":"odim_svc_helmcharts",
"kafka":"odim_third_party_helmcharts",
"zookeeper":"odim_third_party_helmcharts",
"redis":"odim_third_party_helmcharts",
"consul":"odim_third_party_helmcharts"
}
operationHelmChartInfo={
"odimra-config":"upgrade-config",
"odimra-platformconfig":"upgrade-config",
"odimra-secret":"upgrade-config",
"kafka-secret":"upgrade-config",
"zookeeper-secret":"upgrade-config",
"configure-hosts":"upgrade-config",
"odimra-k8s-access-config":"upgrade-config",
"account-session":"upgrade-config",
"aggregation":"upgrade-config",
"api":"upgrade-config",
"events":"upgrade-config",
"fabrics":"upgrade-config",
"managers":"upgrade-config",
"systems":"upgrade-config",
"task":"upgrade-config",
"update":"upgrade-config",
"kafka":"upgrade_thirdparty",
"zookeeper":"upgrade_thirdparty",
"redis":"upgrade_thirdparty",
"consul":"upgrade_thirdparty"
}
if config_map_name not in optionHelmChartInfo:
logger.critical("%s upgrade is not supported!!!", config_map_name)
sys.exit(1)
helmCharatGroupName=optionHelmChartInfo[config_map_name]
if 'haDeploymentEnabled' in CONTROLLER_CONF_DATA['odimra'] and \
CONTROLLER_CONF_DATA['odimra']['haDeploymentEnabled'] and \
helmCharatGroupName == 'odim_third_party_helmcharts':
helmCharatGroupName = 'odim_third_party_ha_helmcharts'
operationName=operationHelmChartInfo[config_map_name]
helmchartData=GROUP_VAR_DATA[helmCharatGroupName]
fullHelmChartName = helmchartData[config_map_name]
if fullHelmChartName=='':
logger.critical("%s upgrade is not supported!!!", config_map_name)
sys.exit(1)
logger.info('Full helm chart name %s',fullHelmChartName)
cur_dir = os.getcwd()
os.chdir(ODIMRA_SRC_PATH)
host_file = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
if not os.path.exists(host_file):
logger.error("Host file not found for deployment id %s" %(DEPLOYMENT_ID))
sys.exit(1)
if not DRY_RUN_SET:
load_password_from_vault(cur_dir)
# check if certs needs to be generated or loaded again
if 'secret' in config_map_name:
reload_odimra_certs()
upgrade_flag = False
if "third_party" in helmCharatGroupName or helmCharatGroupName =='odim_svc_helmcharts':
if ODIMRA_IMAGE_PATH == "":
logger.warning("odimra image source path not configured, expecting user to copy & load all the required odimra docker images on cluster nodes !!!")
else:
nodes_list = ""
for node, attrs in K8S_INVENTORY_DATA['all']['hosts'].items():
nodes_list += '{hostname},'.format(hostname=node)
nodes_list = nodes_list.rstrip(',')
dockerImageName=GROUP_VAR_DATA['odim_docker_images'][config_map_name]
logger.info("Start copying of docker images for %s",config_map_name)
docker_copy_image_command= 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "docker_image_name={docker_image_name} helm_config_file={helm_config_file} host={nodes} ignore_err={ignore_err}" pre_upgrade.yaml'.format(\
host_conf_file=host_file,docker_image_name=dockerImageName,\
helm_config_file=CONTROLLER_CONF_FILE,\
nodes=nodes_list,\
ignore_err=IGNORE_ERRORS_SET)
ret = exec(docker_copy_image_command, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("ODIMRA %s failed to copy docker image %s", operationName, dockerImageName)
os.chdir(cur_dir)
sys.exit(1)
else:
logger.info("ODIMRA %s success copy docker image %s", operationName, dockerImageName)
for master_node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].items():
logger.info("Starting upgrade of %s on master node %s", fullHelmChartName, master_node[0])
odimra_upgrade_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={master_node} helm_chart_name={helm_chart_name} helm_chart_name_version={helm_chart_name_version} helm_config_file={helm_config_file} ignore_err={ignore_err}" {operation_conf_file}.yaml'.format( \
host_conf_file=host_file, master_node=master_node[0], \
helm_chart_name=config_map_name, \
helm_chart_name_version=fullHelmChartName, \
helm_config_file=CONTROLLER_CONF_FILE, \
operation_conf_file=operationName,ignore_err=IGNORE_ERRORS_SET)
ret = exec(odimra_upgrade_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("ODIMRA %s failed when tried on master node %s", operationName, master_node[0])
else:
logger.info("ODIMRA %s success on master node %s", operationName, master_node[0])
upgrade_flag=True
break
if upgrade_flag:
logger.info("Completed ODIMRA %s operation", operationName)
else:
logger.info("Could not %s ODIMRA on any master nodes", operationName)
os.chdir(cur_dir)
sys.exit(1)
os.chdir(cur_dir)
def list_deployments():
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
if 'namespace' not in CONTROLLER_CONF_DATA['odimra'] or \
CONTROLLER_CONF_DATA['odimra']['namespace'] == None or \
CONTROLLER_CONF_DATA['odimra']['namespace'] == "":
logger.critical("namespace not configured, exiting!!!")
sys.exit(1)
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
list_flag = False
for master_node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].items():
ip = K8S_INVENTORY_DATA['all']['hosts'][master_node[0]]['ip']
list_deps_cmd = '/usr/bin/ssh {ip} helm list -n {namespace}'.format( \
namespace=CONTROLLER_CONF_DATA['odimra']['namespace'], ip=ip)
ret = exec(list_deps_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret == 0:
list_flag = True
break
if not list_flag:
sys.exit(1)
def list_deployment_history(depName):
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
if 'namespace' not in CONTROLLER_CONF_DATA['odimra'] or \
CONTROLLER_CONF_DATA['odimra']['namespace'] == None or \
CONTROLLER_CONF_DATA['odimra']['namespace'] == "":
logger.critical("namespace not configured, exiting!!!")
sys.exit(1)
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
list_flag = False
for master_node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].items():
ip = K8S_INVENTORY_DATA['all']['hosts'][master_node[0]]['ip']
list_history_cmd = '/usr/bin/ssh {ip} helm history {deployment} -n {namespace}'.format( \
ip=ip, deployment=depName, \
namespace=CONTROLLER_CONF_DATA['odimra']['namespace'])
ret = exec(list_history_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret == 0:
list_flag = True
break
if not list_flag:
sys.exit(1)
def rollback_deployment(depName, revision):
logger.info("rollback %s deployment to revision %d", depName, revision)
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
cur_dir = os.getcwd()
if not DRY_RUN_SET:
os.chdir(ODIMRA_SRC_PATH)
load_password_from_vault(cur_dir)
rollback_flag = False
host_file = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
for master_node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].items():
logger.info("Starting rollback of %s deployment on master node %s", depName, master_node[0])
rollback_dep_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={master_node} release={depName} revision={revision}" rollback.yaml'.format( \
host_conf_file=host_file, master_node=master_node[0], \
depName=depName, revision=revision)
ret = exec(rollback_dep_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("rollback of %s deployment failed on master node %s", depName, master_node[0])
else:
rollback_flag=True
break
if rollback_flag:
logger.info("rollback of %s deployment to revision %d was successful", depName, revision)
else:
logger.info("rollback of %s deployment to revision %d failed", depName, revision)
os.chdir(cur_dir)
sys.exit(1)
os.chdir(cur_dir)
def scale_plugin(plugin_name, replica_count):
logger.info("scaling plugin %s deployment to replicas %d", plugin_name, replica_count)
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
cur_dir = os.getcwd()
if not DRY_RUN_SET:
os.chdir(ODIMRA_SRC_PATH)
load_password_from_vault(cur_dir)
scaling_flag = False
pluginPackagePath = CONTROLLER_CONF_DATA['odimPluginPath'] + "/" + plugin_name
if not(path.isdir(pluginPackagePath)):
logger.error("%s plugin info not present in configured odimPluginPath, scaling not supported", plugin_name)
sys.exit(1)
host_file = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
for master_node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].items():
logger.info("Starting scaling of %s plugin on master node %s", plugin_name, master_node[0])
scale_plugin_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={master_node} helm_chart_name={helm_chart_name} helm_config_file={helm_config_file} replicas={replicas}" scale_plugin.yaml'.format( \
host_conf_file=host_file, master_node=master_node[0], \
helm_chart_name=plugin_name, helm_config_file=CONTROLLER_CONF_FILE, \
replicas=replica_count)
ret = exec(scale_plugin_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("scaling %s plugin failed on master node %s", plugin_name, master_node[0])
else:
scaling_flag=True
break
if scaling_flag:
logger.info("scaled %s plugin to %d replica(s)", plugin_name, replica_count)
else:
logger.info("failed to scale %s plugin to %d replica(s)", plugin_name, replica_count)
os.chdir(cur_dir)
sys.exit(1)
os.chdir(cur_dir)
def scale_svc(svc_uservice_name,replica_count):
logger.info("scaling svc %s deployment to replicas %d", svc_uservice_name, replica_count)
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
    # load the group_all yaml and find the full helm chart name
read_groupvar()
helmchartData=GROUP_VAR_DATA["odim_svc_helmcharts"]
userviceList=svc_uservice_name.split(",")
for data in userviceList:
if data=="all":
for helmChartName in helmchartData:
scale_svc_helm_chart(helmChartName,replica_count,helmchartData)
else:
scale_svc_helm_chart(data,replica_count,helmchartData)
def scale_svc_helm_chart(svc_uservice_name,replica_count,helmchartData):
if svc_uservice_name not in helmchartData:
logger.critical("scaling of svc %s is not supported!!!", svc_uservice_name)
sys.exit(1)
fullHelmChartName=helmchartData[svc_uservice_name]
logger.info('Full helm chart name %s',fullHelmChartName)
operationName="scale_svc"
cur_dir = os.getcwd()
os.chdir(ODIMRA_SRC_PATH)
host_file = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
if not os.path.exists(host_file):
logger.error("Host file not found for deployment id %s" %(DEPLOYMENT_ID))
sys.exit(1)
if not DRY_RUN_SET:
load_password_from_vault(cur_dir)
scale_flag = False
for master_node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].items():
logger.info("Starting scaling of %s on master node %s", fullHelmChartName, master_node[0])
odimra_upgrade_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={master_node} helm_chart_name={helm_chart_name} helm_chart_name_version={helm_chart_name_version} helm_config_file={helm_config_file} replicas={replicas} ignore_err={ignore_err}" {operation_conf_file}.yaml'.format( \
host_conf_file=host_file, master_node=master_node[0], \
helm_chart_name=svc_uservice_name, \
helm_chart_name_version=fullHelmChartName, \
helm_config_file=CONTROLLER_CONF_FILE, \
replicas=replica_count, \
operation_conf_file=operationName,ignore_err=IGNORE_ERRORS_SET)
ret = exec(odimra_upgrade_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("ODIMRA %s failed when tried on master node %s", operationName, master_node[0])
else:
logger.info("ODIMRA %s success on master node %s", operationName, master_node[0])
scale_flag=True
break
if scale_flag:
logger.info("Completed ODIMRA %s operation", operationName)
else:
logger.info("Could not %s ODIMRA on any master nodes", operationName)
os.chdir(cur_dir)
sys.exit(1)
os.chdir(cur_dir)
def deploy_plugin(plugin_name):
logger.info("Deploy %s plugin", plugin_name)
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
plugin_list = []
if plugin_name != 'all':
pluginPackagePath = CONTROLLER_CONF_DATA['odimPluginPath'] + "/" + plugin_name
if not(path.isdir(pluginPackagePath)):
logger.error("%s plugin content not present in configured odimPluginPath, cannot deploy", plugin_name)
sys.exit(1)
plugin_list.append(plugin_name)
else:
temp_list = []
for (_, subDirName, _) in os.walk(CONTROLLER_CONF_DATA['odimPluginPath']):
temp_list.append(subDirName)
break
if len(temp_list) <= 0 or len(temp_list[0]) <= 0:
return
for item in temp_list[0]:
plugin_list.append(item)
cur_dir = os.getcwd()
host_file = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
if not DRY_RUN_SET:
os.chdir(ODIMRA_SRC_PATH)
load_password_from_vault(cur_dir)
plugin_count = 0
for plugin in plugin_list:
for master_node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].items():
logger.info("Starting deployment of %s on master node %s", plugin, master_node[0])
deploy_plugin_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={master_node} release_name={plugin_name} helm_chart_name={helm_chart_name} helm_config_file={helm_config_file}" deploy_plugin.yaml'.format( \
host_conf_file=host_file, master_node=master_node[0], \
plugin_name=plugin, helm_chart_name=plugin, \
helm_config_file=CONTROLLER_CONF_FILE)
ret = exec(deploy_plugin_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("deploying %s failed on master node %s", plugin, master_node)
else:
plugin_count += 1
break
upgrade_failed_count = len(plugin_list) - plugin_count
if upgrade_failed_count == 0:
logger.info("Successfully deployed %s", plugin_list)
else:
logger.info("Deployment of %d plugin(s) in %s failed", upgrade_failed_count, plugin_list)
os.chdir(cur_dir)
sys.exit(1)
os.chdir(cur_dir)
def remove_plugin(plugin_name):
logger.info("remove %s plugin", plugin_name)
# Parse the conf file passed
read_conf()
# Validate conf parameters passed
perform_checks()
# load existing hosts.yaml created for the deployment_id
load_k8s_host_conf()
cur_dir = os.getcwd()
host_file = os.path.join(KUBESPRAY_SRC_PATH, DEPLOYMENT_SRC_DIR, 'hosts.yaml')
pluginPackagePath = CONTROLLER_CONF_DATA['odimPluginPath'] + "/" + plugin_name
if not(path.isdir(pluginPackagePath)):
logger.info("%s was not deployed via odim controller", plugin_name)
os.chdir(cur_dir)
return
if not DRY_RUN_SET:
os.chdir(ODIMRA_SRC_PATH)
load_password_from_vault(cur_dir)
upgrade_flag = False
for master_node in K8S_INVENTORY_DATA['all']['children']['kube-master']['hosts'].items():
logger.info("Starting removal of %s plugin on master node %s", plugin_name, master_node[0])
remove_plugin_cmd = 'ansible-playbook -i {host_conf_file} --become --become-user=root \
--extra-vars "host={master_node} release_name={plugin_name} helm_chart_name={helm_chart_name} helm_config_file={helm_config_file}" remove_plugin.yaml'.format( \
host_conf_file=host_file, master_node=master_node[0], \
plugin_name=plugin_name, helm_chart_name=plugin_name, \
helm_config_file=CONTROLLER_CONF_FILE)
ret = exec(remove_plugin_cmd, {'ANSIBLE_BECOME_PASS': ANSIBLE_BECOME_PASS})
if ret != 0:
logger.critical("removal of %s plugin failed on master node %s", plugin_name, master_node[0])
else:
upgrade_flag=True
break
if upgrade_flag:
logger.info("Successfully removed %s plugin", plugin_name)
else:
logger.info("Failed to remove %s plugin", plugin_name)
os.chdir(cur_dir)
sys.exit(1)
os.chdir(cur_dir)
def init_log():
global logger
logger = logging.getLogger('odim_controller')
logger.setLevel(logging.DEBUG)
consoleHdlr = logging.StreamHandler()
consoleHdlr.setLevel(logging.DEBUG)
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)-5s - %(message)s')
consoleHdlr.setFormatter(logFormatter)
logger.addHandler(consoleHdlr)
def main():
init_log()
parser = argparse.ArgumentParser(description='ODIM controller')
parser.add_argument('--deploy', help='supported values: kubernetes, odimra')
parser.add_argument('--reset', help='supported values: kubernetes, odimra')
parser.add_argument('--addnode', help='supported values: kubernetes')
parser.add_argument('--rmnode', help='supported values: kubernetes')
parser.add_argument('--config', help='absolute path of the config file')
parser.add_argument('--dryrun', action='store_true', help='only check for configurations without deploying k8s')
parser.add_argument('--noprompt', action='store_true', help='do not prompt for confirmation')
parser.add_argument('--ignore-errors', action='store_true', help='ignore errors during odimra reset')
parser.add_argument("--upgrade", help='supported values:odimra-config,odimra-platformconfig,configure-hosts,odimra-k8s-access-config,odimra-secret,kafka-secret,zookeeper-secret,account-session,aggregation,api,events,fabrics,managers,systems,task,update,kafka,zookeeper,redis,consul,plugin,all,odimra,thirdparty')
parser.add_argument("--scale", action='store_true', help='scale odimra services and plugins')
parser.add_argument("--svc", help='supported values:account-session,aggregation,api,events,fabrics,managers,systems,task,update,all')
parser.add_argument("--plugin", help='release name of the plugin deployment to add,remove,upgrade or scale')
parser.add_argument('--add', help='supported values: plugin')
parser.add_argument('--remove', help='supported values: plugin')
parser.add_argument("--replicas", help='replica count of the odimra services or plugins', type=int)
parser.add_argument('--list', help='supported values:deployment, history')
parser.add_argument('--dep', help='deployment name, should be used with --list=history, --rollback')
parser.add_argument('--rollback', action='store_true', help='rollback deployment to particular revision')
parser.add_argument('--revision', help='revision number of the deployment, should be used with --rollback', type=int)
args = parser.parse_args()
global CONTROLLER_CONF_FILE, DRY_RUN_SET, NO_PROMPT_SET, IGNORE_ERRORS_SET
if args.deploy == None and args.reset == None and args.addnode == None and \
args.rmnode == None and args.upgrade == None and args.scale == None and \
args.list == None and args.rollback == None and args.add == None and \
args.remove == None:
logger.critical("Atleast one mandatory option must be provided")
parser.print_help()
sys.exit(1)
if args.dryrun:
DRY_RUN_SET = True
if args.noprompt:
NO_PROMPT_SET = True
if args.config != None:
CONTROLLER_CONF_FILE = args.config
if args.deploy != None:
if args.deploy == 'kubernetes':
install_k8s()
elif args.deploy == 'odimra':
install_odimra()
else:
logger.critical("Unsupported value %s for deploy option", args.deploy)
parser.print_help()
sys.exit(1)
if args.reset != None:
if args.reset == 'kubernetes':
if args.ignore_errors:
IGNORE_ERRORS_SET = True
reset_k8s()
elif args.reset == 'odimra':
if args.ignore_errors:
IGNORE_ERRORS_SET = True
uninstall_odimra()
else:
logger.critical("Unsupported value %s for reset option", args.reset)
parser.print_help()
sys.exit(1)
if args.addnode != None:
if args.addnode == 'kubernetes':
add_k8s_node()
else:
logger.critical("Unsupported value %s for addnode option", args.addnode)
parser.print_help()
sys.exit(1)
if args.rmnode != None:
if args.rmnode == 'kubernetes':
rm_k8s_node()
else:
logger.critical("Unsupported value %s for rmnode option", args.rmnode)
parser.print_help()
sys.exit(1)
if args.upgrade != None:
if args.upgrade == 'plugin':
if args.plugin == None:
logger.error("option --upgrade=plugin: expects --plugin argument")
sys.exit(1)
deploy_plugin(args.plugin)
else:
upgrade_config_map(args.upgrade)
if args.add != None:
if args.add == 'plugin':
if args.plugin == None:
logger.error("option --add=plugin: expects --plugin argument")
sys.exit(1)
deploy_plugin(args.plugin)
else:
logger.critical("Unsupported value %s for add option", args.add)
sys.exit(1)
if args.remove != None:
if args.remove == 'plugin':
if args.plugin == None:
logger.error("option --remove=plugin: expects --plugin argument")
sys.exit(1)
remove_plugin(args.plugin)
else:
logger.critical("Unsupported value %s for remove option", args.remove)
parser.print_help()
sys.exit(1)
if args.scale:
if args.replicas == None or args.replicas <= MIN_REPLICA_COUNT or args.replicas > MAX_REPLICA_COUNT:
logger.critical("Unsupported value %d for replicas option", args.replicas)
sys.exit(1)
if args.svc != None:
scale_svc(args.svc, args.replicas)
elif args.plugin != None:
scale_plugin(args.plugin, args.replicas)
else:
logger.critical("option --scale: expects --svc or --plugin argument")
parser.print_help()
sys.exit(1)
if args.list != None:
if args.list == 'deployment':
list_deployments()
elif args.list == 'history':
if args.dep == None:
logger.error("option --history: expects --dep argument")
sys.exit(1)
list_deployment_history(args.dep)
else:
logger.error("Unsupported value %s for list option", args.list)
sys.exit(1)
if args.rollback:
if args.dep == None or args.revision == None:
logger.error("option --rollback: expects both --dep and --revision arguments")
sys.exit(1)
rollback_deployment(args.dep, args.revision)
if __name__=="__main__":
main()
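# Illustrative invocations (not part of the original script): the option names come
# from the argparse definitions in main() above; the script file name and the config
# path are placeholders and must be adapted to the actual deployment.
#
#   python3 odim-controller.py --deploy kubernetes --config /path/to/kube_deploy_nodes.yaml
#   python3 odim-controller.py --deploy odimra --config /path/to/kube_deploy_nodes.yaml
#   python3 odim-controller.py --scale --svc api --replicas 3 --config /path/to/kube_deploy_nodes.yaml
#   python3 odim-controller.py --list deployment --config /path/to/kube_deploy_nodes.yaml
#   python3 odim-controller.py --rollback --dep <deployment-name> --revision 2 --config /path/to/kube_deploy_nodes.yaml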
| 39.615646
| 313
| 0.739146
|
425fd5c0840cf5ac800e5edb68ac890f7262be50
| 648
|
py
|
Python
|
computer/migrations/0001_initial.py
|
Zomba4okk/EmployeesManager
|
bff29dec7a7b83db79ef3449e19ad51b6fd4df8d
|
[
"MIT"
] | null | null | null |
computer/migrations/0001_initial.py
|
Zomba4okk/EmployeesManager
|
bff29dec7a7b83db79ef3449e19ad51b6fd4df8d
|
[
"MIT"
] | null | null | null |
computer/migrations/0001_initial.py
|
Zomba4okk/EmployeesManager
|
bff29dec7a7b83db79ef3449e19ad51b6fd4df8d
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.4 on 2020-12-23 03:03
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Computer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('model', models.CharField(max_length=30)),
('type', models.CharField(choices=[('PC', 'PC'), ('LAPTOP', 'LAPTOP')], max_length=30)),
('purchase_date', models.DateField()),
],
),
]
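# Note (illustrative, not part of the generated migration): the fields above imply a
# Computer model in the app's models.py that could be used roughly as follows; the
# field values are made up.
#
#   from datetime import date
#   Computer.objects.create(model="ThinkPad T480", type="LAPTOP", purchase_date=date(2020, 12, 23))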
| 27
| 114
| 0.560185
|
0f6f239ce7a7d71f01fe86cad02bcd999ccf2c42
| 5,833
|
py
|
Python
|
samtranslator/feature_toggle/feature_toggle.py
|
BearerPipelineTest/serverless-application-model
|
2be47dfdf1140b1e84a24bab392cfd634dfe9d1f
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
samtranslator/feature_toggle/feature_toggle.py
|
BearerPipelineTest/serverless-application-model
|
2be47dfdf1140b1e84a24bab392cfd634dfe9d1f
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
samtranslator/feature_toggle/feature_toggle.py
|
BearerPipelineTest/serverless-application-model
|
2be47dfdf1140b1e84a24bab392cfd634dfe9d1f
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
import os
import sys
import json
import boto3
import logging
import hashlib
from botocore.config import Config
from samtranslator.feature_toggle.dialup import (
DisabledDialup,
ToggleDialup,
SimpleAccountPercentileDialup,
)
from samtranslator.metrics.method_decorator import cw_timer
LOG = logging.getLogger(__name__)
class FeatureToggle:
"""
    FeatureToggle provides methods to query whether a feature is enabled,
    based on where SAM is executing.
"""
DIALUP_RESOLVER = {
"toggle": ToggleDialup,
"account-percentile": SimpleAccountPercentileDialup,
}
def __init__(self, config_provider, stage, account_id, region):
self.feature_config = config_provider.config
self.stage = stage
self.account_id = account_id
self.region = region
def _get_dialup(self, region_config, feature_name):
"""
get the right dialup instance
if no dialup type is provided or the specified dialup is not supported,
an instance of DisabledDialup will be returned
:param region_config: region config
:param feature_name: feature_name
        :return: an instance of the resolved dialup class, or DisabledDialup if the type is missing or unsupported
"""
dialup_type = region_config.get("type")
if dialup_type in FeatureToggle.DIALUP_RESOLVER:
return FeatureToggle.DIALUP_RESOLVER[dialup_type](
region_config, account_id=self.account_id, feature_name=feature_name
)
LOG.warning("Dialup type '{}' is None or is not supported.".format(dialup_type))
return DisabledDialup(region_config)
def is_enabled(self, feature_name):
"""
To check if feature is available
:param feature_name: name of feature
"""
if feature_name not in self.feature_config:
LOG.warning("Feature '{}' not available in Feature Toggle Config.".format(feature_name))
return False
stage = self.stage
region = self.region
account_id = self.account_id
if not stage or not region or not account_id:
LOG.warning(
"One or more of stage, region and account_id is not set. Feature '{}' not enabled.".format(feature_name)
)
return False
stage_config = self.feature_config.get(feature_name, {}).get(stage, {})
if not stage_config:
LOG.info("Stage '{}' not enabled for Feature '{}'.".format(stage, feature_name))
return False
if account_id in stage_config:
account_config = stage_config[account_id]
region_config = account_config[region] if region in account_config else account_config.get("default", {})
else:
region_config = stage_config[region] if region in stage_config else stage_config.get("default", {})
dialup = self._get_dialup(region_config, feature_name=feature_name)
LOG.info("Using Dialip {}".format(dialup))
is_enabled = dialup.is_enabled()
LOG.info("Feature '{}' is enabled: '{}'".format(feature_name, is_enabled))
return is_enabled
class FeatureToggleConfigProvider:
"""Interface for all FeatureToggle config providers"""
def __init__(self):
pass
@property
def config(self):
raise NotImplementedError
class FeatureToggleDefaultConfigProvider(FeatureToggleConfigProvider):
"""Default config provider, always return False for every query."""
def __init__(self):
FeatureToggleConfigProvider.__init__(self)
@property
def config(self):
return {}
class FeatureToggleLocalConfigProvider(FeatureToggleConfigProvider):
"""Feature toggle config provider which uses a local file. This is to facilitate local testing."""
def __init__(self, local_config_path):
FeatureToggleConfigProvider.__init__(self)
with open(local_config_path, "r") as f:
config_json = f.read()
self.feature_toggle_config = json.loads(config_json)
@property
def config(self):
return self.feature_toggle_config
class FeatureToggleAppConfigConfigProvider(FeatureToggleConfigProvider):
"""Feature toggle config provider which loads config from AppConfig."""
@cw_timer(prefix="External", name="AppConfig")
def __init__(self, application_id, environment_id, configuration_profile_id):
FeatureToggleConfigProvider.__init__(self)
try:
LOG.info("Loading feature toggle config from AppConfig...")
# Lambda function has 120 seconds limit
# (5 + 5) * 2, 20 seconds maximum timeout duration
# In case of high latency from AppConfig, we can always fall back to use an empty config and continue transform
client_config = Config(connect_timeout=5, read_timeout=5, retries={"total_max_attempts": 2})
self.app_config_client = boto3.client("appconfig", config=client_config)
response = self.app_config_client.get_configuration(
Application=application_id,
Environment=environment_id,
Configuration=configuration_profile_id,
ClientId="FeatureToggleAppConfigConfigProvider",
)
binary_config_string = response["Content"].read()
self.feature_toggle_config = json.loads(binary_config_string.decode("utf-8"))
LOG.info("Finished loading feature toggle config from AppConfig.")
except Exception as ex:
LOG.error("Failed to load config from AppConfig: {}. Using empty config.".format(ex))
# There is chance that AppConfig is not available in a particular region.
self.feature_toggle_config = json.loads("{}")
@property
def config(self):
return self.feature_toggle_config
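# Illustrative usage sketch (not part of the original module). The file name, feature,
# stage, account and region values are hypothetical; the nesting mirrors the lookup
# order walked by is_enabled() above (feature -> stage -> account/region -> dialup config).
#
#   provider = FeatureToggleLocalConfigProvider("feature_toggle_config.json")
#   toggles = FeatureToggle(provider, stage="beta", account_id="123456789012", region="us-west-2")
#   if toggles.is_enabled("my_feature"):
#       ...  # feature-gated code path
#
# where feature_toggle_config.json might contain something like
#   {"my_feature": {"beta": {"us-west-2": {"type": "toggle", ...}}}}
# with the remaining per-region keys defined by the corresponding dialup class.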
| 36.917722
| 123
| 0.674267
|
05948391e430bc86b87b3db56e00869f67095460
| 1,093
|
py
|
Python
|
configs/twins/twins_svt-s_uperhead_8x2_512x512_160k_ade20k.py
|
heytanay/mmsegmentation
|
7ddd2fe2ecff9c95999bd00ec05cc37eafb558f8
|
[
"Apache-2.0"
] | 11
|
2022-02-04T01:09:45.000Z
|
2022-03-08T05:49:16.000Z
|
configs/twins/twins_svt-s_uperhead_8x2_512x512_160k_ade20k.py
|
heytanay/mmsegmentation
|
7ddd2fe2ecff9c95999bd00ec05cc37eafb558f8
|
[
"Apache-2.0"
] | 1
|
2022-01-07T15:03:23.000Z
|
2022-01-12T14:39:09.000Z
|
configs/twins/twins_svt-s_uperhead_8x2_512x512_160k_ade20k.py
|
heytanay/mmsegmentation
|
7ddd2fe2ecff9c95999bd00ec05cc37eafb558f8
|
[
"Apache-2.0"
] | 2
|
2021-04-23T05:32:00.000Z
|
2021-11-11T02:45:08.000Z
|
_base_ = [
'../_base_/models/twins_pcpvt-s_upernet.py',
'../_base_/datasets/ade20k.py', '../_base_/default_runtime.py',
'../_base_/schedules/schedule_160k.py'
]
model = dict(
backbone=dict(
type='SVT',
init_cfg=dict(
type='Pretrained', checkpoint='pretrained/alt_gvt_small.pth'),
embed_dims=[64, 128, 256, 512],
num_heads=[2, 4, 8, 16],
mlp_ratios=[4, 4, 4, 4],
depths=[2, 2, 10, 4],
windiow_sizes=[7, 7, 7, 7],
norm_after_stage=True),
decode_head=dict(in_channels=[64, 128, 256, 512]),
auxiliary_head=dict(in_channels=256))
optimizer = dict(
_delete_=True,
type='AdamW',
lr=0.00006,
betas=(0.9, 0.999),
weight_decay=0.01,
paramwise_cfg=dict(custom_keys={
'pos_block': dict(decay_mult=0.),
'norm': dict(decay_mult=0.)
}))
lr_config = dict(
_delete_=True,
policy='poly',
warmup='linear',
warmup_iters=1500,
warmup_ratio=1e-6,
power=1.0,
min_lr=0.0,
by_epoch=False)
data = dict(samples_per_gpu=2, workers_per_gpu=2)
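# This file only defines the config; it is presumably consumed by mmsegmentation's
# usual training entry point, e.g. (single GPU, run from the repo root):
#   python tools/train.py configs/twins/twins_svt-s_uperhead_8x2_512x512_160k_ade20k.py
# The "8x2" in the file name follows the convention of 8 GPUs x 2 samples per GPU,
# which matches samples_per_gpu=2 above.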
| 26.02381
| 74
| 0.600183
|
814d030d0c581425cc9c8c7602c26b7424b22d29
| 17,800
|
py
|
Python
|
qt_binder/widgets.py
|
Qt-Widgets/qt_binding-traits
|
16c7a9ee439ff35d4d027c797ae1d05453a5fc06
|
[
"BSD-3-Clause"
] | 15
|
2015-09-02T11:16:50.000Z
|
2021-06-24T04:00:52.000Z
|
qt_binder/widgets.py
|
Qt-Widgets/qt_binding-traits
|
16c7a9ee439ff35d4d027c797ae1d05453a5fc06
|
[
"BSD-3-Clause"
] | 54
|
2015-09-02T10:45:49.000Z
|
2020-11-30T13:01:05.000Z
|
qt_binder/widgets.py
|
Qt-Widgets/qt_binding-traits
|
16c7a9ee439ff35d4d027c797ae1d05453a5fc06
|
[
"BSD-3-Clause"
] | 3
|
2015-09-16T17:23:50.000Z
|
2016-07-23T05:35:55.000Z
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2014-2015, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
#------------------------------------------------------------------------------
from __future__ import division
from math import exp, log
import operator
import six
from traits.api import Any, Callable, Constant, Dict, Enum, Float, Instance, \
Int, List, NO_COMPARE, Str, Tuple, Undefined, Unicode, on_trait_change
from .binder import Binder, QtDynamicProperty, Rename, Default
from .qt import QtCore, QtGui
from .qt.ui_loader import load_ui
from .raw_widgets import ComboBox, Composite, LineEdit, Slider, binder_registry
INVALID_STYLE_RULE = ("*[valid='false'] "
"{ background-color: rgb(255, 192, 192); }")
class TextField(LineEdit):
""" Simple customization of a LineEdit.
The widget can be configured to update the model on every text change or
only when Enter is pressed (or focus leaves). This emulates Traits UI's
`TextEditor` `auto_set` and `enter_set` configurations.
If a validator is set, invalid text will cause the background to be red.
"""
#: The value to sync with the model.
value = Unicode(comparison_mode=NO_COMPARE)
#: Whether the `value` updates on every keypress, or when Enter is pressed
#: (or `focusOut`).
mode = Enum('auto', 'enter')
#: Whether or not the current value is valid, for the stylesheet.
valid = QtDynamicProperty(True)
def configure(self):
self.styleSheet = INVALID_STYLE_RULE
def _update_valid(self, text):
""" Update the valid trait based on validation of ``text``.
"""
validator = self.validator
if validator is not None:
state, fixed, pos = validator.validate(text, len(text))
self.valid = (state == validator.Acceptable)
@on_trait_change('textEdited')
def _on_textEdited(self, text):
if (self.mode == 'auto' and
'value' not in self.loopback_guard):
with self.loopback_guard('value'):
self._update_valid(text)
self.value = text
@on_trait_change('editingFinished')
def _on_editingFinished(self):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
self.value = self.text
@on_trait_change('text,validator')
def _on_text(self):
self._update_valid(self.text)
def _value_changed(self, new):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
self.text = new
class EditableComboBox(ComboBox):
""" ComboBox with an editable text field.
We do not do bidirectional synchronization of the value with the model
since that is typically not required for these use cases.
"""
lineEdit_class = TextField
#: The selected value.
value = Any(Undefined, comparison_mode=NO_COMPARE)
#: (object, label) pairs.
values = List(Tuple(Any, Unicode))
#: Function that is used to compare two objects in the values list for
#: equality. Defaults to normal Python equality.
same_as = Callable(operator.eq)
editable = Constant(True)
def configure(self):
self.qobj.setEditable(True)
self._on_editable()
super(EditableComboBox, self).configure()
@on_trait_change('values,values_items,qobj')
def _update_values(self):
qobj = self.qobj
if qobj is not None:
old_value = self.value
current_text = qobj.currentText()
current_index = qobj.currentIndex()
# Check if the user entered in custom text that should be
# preserved.
preserve_text = (current_index == -1 or
qobj.itemData(current_index) is None or
current_text != qobj.itemText(current_index))
labels = []
new_index = -1
for i, (value, label) in enumerate(self.values):
if self.same_as(value, old_value):
new_index = i
labels.append(label)
with self.loopback_guard('value'):
if qobj.count() > 0:
qobj.clear()
# Items from the list get their index into the values list
# added as their user data as well. Items added from the text
# field will have that still be None.
for i, label in enumerate(labels):
qobj.addItem(label, i)
if preserve_text:
qobj.setEditText(current_text)
self.value = current_text
else:
qobj.setCurrentIndex(new_index)
@on_trait_change('currentIndexChanged_int')
def _on_currentIndexChanged(self, index):
if index != -1 and 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
values_index = self.qobj.itemData(index)
if values_index is not None:
self.value = self.values[values_index][0]
else:
# Otherwise, it's one of the added values.
self.value = self.qobj.itemText(index)
@on_trait_change('lineEdit:textEdited')
def _on_textEdited(self, text):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
self.value = text
class EnumDropDown(ComboBox):
""" Select from a set of preloaded choices.
"""
#: The selected value.
value = Any(Undefined, comparison_mode=NO_COMPARE)
#: (object, label) pairs.
values = List(Tuple(Any, Unicode))
#: Function that is used to compare two objects in the values list for
#: equality. Defaults to normal Python equality.
same_as = Callable(operator.eq)
editable = Constant(False)
@on_trait_change('values,values_items,qobj')
def _update_values(self):
qobj = self.qobj
if qobj is not None:
old_value = self.value
labels = []
if self.editable:
new_index = -1
else:
new_index = 0
for i, (value, label) in enumerate(self.values):
if self.same_as(value, old_value):
new_index = i
labels.append(label)
if qobj.count() > 0:
qobj.clear()
qobj.addItems(labels)
qobj.setCurrentIndex(new_index)
@on_trait_change('currentIndexChanged_int')
def _on_currentIndexChanged(self, index):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
self.value = self.values[index][0]
def _value_changed(self, new):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
new_index = -1
for i, (value, label) in enumerate(self.values):
if self.same_as(value, new):
new_index = i
break
self.currentIndex = new_index
class UIFile(Composite):
""" Load a layout from a Qt Designer `.ui` file.
Widgets and layouts with names that do not start with underscores will be
added as traits to this :class:`~.Binder`. The :data:`~.binder_registry`
will be consulted to find the raw :class:`~.Binder` to use for each widget.
This can be overridden for any named widget using the :attr:`overrides`
trait.
In case one wants to let the :class:`~.Binder` to own its widget but just
use the `.ui` file for layout, use the :attr:`insertions` dictionary. The
named widget should be a plain `QWidget` in the UI laid out as desired. The
:class:`~.Binder` will create a new widget as the lone child of this
widget and take up all of its space.
"""
qclass = QtGui.QWidget
#: The .ui file with the layout.
filename = Str()
#: Override binders for named widgets.
overrides = Dict(Str, Instance(Binder))
#: Insert binders as children of the named QWidgets.
insertions = Dict(Str, Instance(Binder))
def __init__(self, filename, **traits):
super(UIFile, self).__init__(filename=filename, **traits)
def construct(self, *args, **kwds):
qobj, to_be_bound = load_ui(self.filename)
for name in to_be_bound:
obj = qobj.findChild(QtCore.QObject, name)
self.add_trait(name, Instance(Binder))
if name in self.overrides:
binder = self.overrides[name]
binder.qobj = obj
elif name in self.insertions:
binder = self.insertions[name]
binder.construct()
old_layout = obj.layout()
if old_layout is not None:
# Qt hack to replace the layout. We need to ensure that the
# old one is truly deleted. Reparent it onto a widget that
# we then discard.
QtGui.QWidget().setLayout(old_layout)
layout = QtGui.QVBoxLayout(obj)
layout.setContentsMargins(0, 0, 0, 0)
layout.addWidget(binder.qobj)
else:
binder = binder_registry.lookup(obj)()
binder.qobj = obj
setattr(self, name, binder)
self.qobj = qobj
class BaseSlider(Slider):
""" Base class for the other sliders.
Mostly for interface-checking and common defaults.
"""
#: The value to synch with the model.
value = Any(0)
#: The inclusive range.
range = Tuple(Any(0), Any(99))
#: The underlying Qt value.
qt_value = Rename('value')
# The Qt default is vertical for some awful reason.
orientation = Default(QtCore.Qt.Horizontal)
class IntSlider(BaseSlider):
#: The value to synch with the model.
value = Int(0)
#: The inclusive range.
range = Tuple(Int(0), Int(99))
def configure(self):
# Set the initial values.
self._range_changed()
self._value_changed()
def _range_changed(self):
if self.qobj is not None:
self.qobj.setRange(*self.range)
else:
minimum, maximum = self.range
self.trait_set(
minimum=minimum,
maximum=maximum,
)
def _value_changed(self):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
self.qt_value = self.value
@on_trait_change('qt_value')
def _on_qt_value(self):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
self.value = self.qt_value
class FloatSlider(BaseSlider):
#: The value to synch with the model.
value = Float(0.0)
#: The inclusive range.
range = Tuple(Float(0.0), Float(1.0))
#: The number of steps in the range.
precision = Int(1000)
def configure(self):
# Set the initial values.
self._precision_changed()
self._value_changed()
def _precision_changed(self):
self.maximum = self.precision
def _range_changed(self):
self._value_changed()
def _value_changed(self):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
self.qt_value = self._qt_value_from_python(self.value)
@on_trait_change('qt_value')
def _on_qt_value(self):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
self.value = self._python_value_from_qt(self.qt_value)
def _qt_value_from_python(self, value):
low, high = self.range
precision = self.precision
qt_value = int(round((value - low) * precision / (high - low)))
qt_value = min(max(qt_value, 0), precision)
return qt_value
def _python_value_from_qt(self, qt_value):
low, high = self.range
precision = self.precision
value = qt_value * (high - low) / precision + low
return value
class LogSlider(FloatSlider):
#: The inclusive range.
range = Tuple(Float(1e-2), Float(100.0))
def _qt_value_from_python(self, value):
low, high = self.range
precision = self.precision
value = max(value, low)
log_low = log(low)
log_high = log(high)
log_value = log(value)
qt_value = int(round((log_value - log_low) *
precision /
(log_high - log_low)))
qt_value = min(max(qt_value, 0), precision)
return qt_value
def _python_value_from_qt(self, qt_value):
low, high = self.range
precision = self.precision
log_low = log(low)
log_high = log(high)
log_value = qt_value * (log_high - log_low) / precision + log_low
value = exp(log_value)
return value
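# Worked example of the slider mappings above (illustrative, not original code):
# a FloatSlider with range=(0.0, 1.0) and precision=1000 maps value 0.25 to
# int(round((0.25 - 0.0) * 1000 / (1.0 - 0.0))) == 250, while a LogSlider with
# range=(1e-2, 100.0) and precision=1000 maps value 1.0 to the midpoint 500,
# because log(1.0) lies halfway between log(1e-2) and log(100.0).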
class RangeSlider(Composite):
""" A slider with labels and a text entry field.
The root widget is a `QWidget` with a new property
`binder_class=RangeSlider`. Stylesheets can reference it using the
selector::
*[binder_class="RangeSlider"] {...}
This can be useful for styling the child `QLabels` and `QLineEdit`, for
example to make a series of `RangeSliders` align.
"""
qclass = QtGui.QWidget
#: The value to synch with the model.
value = Any(0)
#: The inclusive range.
range = Tuple(Any(0), Any(99))
#: The formatting function for the labels.
label_format_func = Callable(six.text_type)
#: The formatting function for the text field. This is used only when the
#: slider is setting the value.
field_format_func = Callable(six.text_type)
#: The slider widget.
slider = Instance(BaseSlider, factory=IntSlider, args=())
#: The field widget.
field = Instance(TextField, args=())
_low_label = Any()
_high_label = Any()
_from_text_func = Callable(int)
def __init__(self, *args, **traits):
# Make sure that a `slider` argument gets assigned before anything else
# because it will affect what range can be accepted.
if 'slider' in traits:
slider = traits.pop('slider')
super(RangeSlider, self).__init__()
self.slider = slider
self.trait_set(**traits)
else:
super(RangeSlider, self).__init__(*args, **traits)
def construct(self):
self.slider.construct()
self.field.construct()
super(RangeSlider, self).construct()
self.qobj.setProperty('binder_class', u'RangeSlider')
layout = QtGui.QHBoxLayout()
self._low_label = QtGui.QLabel()
self._low_label.setAlignment(QtCore.Qt.AlignRight)
self._high_label = QtGui.QLabel()
self._high_label.setAlignment(QtCore.Qt.AlignLeft)
layout.addWidget(self._low_label)
layout.addWidget(self.slider.qobj)
layout.addWidget(self._high_label)
layout.addWidget(self.field.qobj)
layout.setContentsMargins(0, 0, 0, 0)
self.qobj.setLayout(layout)
def configure(self):
super(RangeSlider, self).configure()
if isinstance(self.slider, IntSlider):
# Use an integer validator for the text field.
self.field.validator = QtGui.QIntValidator()
self._from_text_func = int
else:
self.field.validator = QtGui.QDoubleValidator()
self._from_text_func = float
self._update_widgets()
@on_trait_change('value,range')
def _update_widgets(self):
# Update the range then the value because the widgets will just
# silently reject values out of bounds.
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
value = self.value
range = self.range
if self.qobj is not None:
self.field.validator.setRange(range[0], range[1])
if not isinstance(self.slider, IntSlider):
# Note: this assumes that all sliders other than
# IntSlider have decimal inputs.
self.field.validator.setDecimals(16)
self._low_label.setText(self.label_format_func(range[0]))
self._high_label.setText(self.label_format_func(range[1]))
self.field.text = six.text_type(value)
self.slider.range = range
self.slider.value = value
@on_trait_change('slider:value')
def _on_slider_value(self):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
value = self.slider.value
self.value = value
self.field.text = self.field_format_func(value)
@on_trait_change('field:value')
def _on_field_text(self, text):
if 'value' not in self.loopback_guard:
with self.loopback_guard('value'):
if self.field.valid:
try:
value = self._from_text_func(text)
except ValueError:
pass
else:
self.value = value
self.slider.value = value
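# Illustrative construction sketch (not part of the original module); the trait values
# are arbitrary and a running QApplication is assumed:
#
#   slider = RangeSlider(slider=FloatSlider(), range=(0.0, 10.0), value=2.5)
#   slider.construct()
#   slider.configure()
#
# Passing `slider` matters because, as __init__ notes above, the slider type determines
# which range and value types are accepted.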
| 34.099617
| 79
| 0.59764
|
ea1750f6b9aa22e84bacf4224262692b624e9c58
| 127
|
py
|
Python
|
function.py
|
jaiminjerry/Python
|
eb7013c7560b09d37849d653516257d939e143aa
|
[
"bzip2-1.0.6"
] | null | null | null |
function.py
|
jaiminjerry/Python
|
eb7013c7560b09d37849d653516257d939e143aa
|
[
"bzip2-1.0.6"
] | null | null | null |
function.py
|
jaiminjerry/Python
|
eb7013c7560b09d37849d653516257d939e143aa
|
[
"bzip2-1.0.6"
] | 1
|
2021-08-17T03:46:56.000Z
|
2021-08-17T03:46:56.000Z
|
def example(): # def is the keyword used to define a function.
    print('Basic function')
z = 3+9
print(z)
example()
| 18.142857
| 58
| 0.590551
|
77217e38f9034246d687eb4f6c3c523afd6b10f8
| 3,830
|
py
|
Python
|
webapp/app.py
|
eandreas/ds18b20
|
eac5da4d26504a3b8f9472955ec7fe38b104e732
|
[
"Apache-2.0"
] | null | null | null |
webapp/app.py
|
eandreas/ds18b20
|
eac5da4d26504a3b8f9472955ec7fe38b104e732
|
[
"Apache-2.0"
] | null | null | null |
webapp/app.py
|
eandreas/ds18b20
|
eac5da4d26504a3b8f9472955ec7fe38b104e732
|
[
"Apache-2.0"
] | null | null | null |
from pathlib2 import Path
import dash
from dash.dependencies import Output, Input
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
import pandas as pd
import plotly.graph_objects as go
import dash_bootstrap_components as dbc
external_stylesheets = [
dbc.themes.YETI
#dbc.themes.DARKLY
#'https://codepen.io/chriddyp/pen/bWLwgP.css'
]
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
def load_data():
temp_file = Path('/Users/eandreas/projects/dev/ds18b20/resources/get_temp_C.out')
#temp_file = Path('/home/pi/get_temp_C.out')
return pd.read_csv(temp_file, sep=' ', header=None, names=['dev_sn', 'date', 'time', 'temp_raw', 'temp_C'], parse_dates=[['date', 'time']])
def build_figure(df):
fig = go.Figure()
fig.add_trace(go.Scatter(x=list(df.date_time), y=list(df.temp_C), name='t_corr'))
fig.add_trace(go.Scatter(x=list(df.date_time), y=list(df.temp_raw), name='t_raw'))
fig.update_layout(
#title_text="Gemessene Temperatur - DS18B20@dragonfly",
xaxis_title="Datum",
yaxis_title="Temperatur / °C",
template='none'
)
fig.update_layout(
xaxis=dict(
rangeselector=dict(
buttons=list([
dict(count=1,
label="1 h",
step="hour",
stepmode="backward"),
dict(count=6,
label="6 h",
step="hour",
stepmode="backward"),
dict(count=1,
label="1 d",
step="day",
stepmode="backward"),
dict(count=7,
label="7 d",
step="day",
stepmode="backward"),
dict(count=1,
label="1 m",
step="month",
stepmode="backward"),
dict(count=1,
label="1 y",
step="year",
stepmode="backward"),
dict(step="all")
])
),
rangeslider=dict(
visible=True
),
type="date"
),
uirevision="true"
)
return fig
df = load_data()
NAVBAR = dbc.NavbarSimple(
brand="Raspberry Pi Temperaturmessung",
brand_href="#",
color="primary",
dark=True,
)
def get_temp_graph(df):
return dbc.Card(
[
dbc.CardHeader("Gemessene Temperatur im Zimmer 2 (Büro) - DS18B20@dragonfly"),
dbc.CardBody(
[
dcc.Graph(
id='live_temp_graph',
figure=build_figure(df),
config={'displayModeBar': False}
),
dcc.Interval(
id = 'graph_update_interval',
interval = 1 * 1000,
n_intervals=0
)
]
)
]
)
def get_body(df):
    return html.Div(children=[
        get_temp_graph(df)
    ], className='col-12')
#return dbc.Container(
# [
# get_temp_graph(df),
# ],
# className="mt-2 mb-3",
#)
def serve_layout():
return html.Div(children=[
NAVBAR,
get_body(load_data())
])
@app.callback(Output('live_temp_graph', 'figure'),
Input('graph_update_interval', 'n_intervals'))
def update_graph_scatter(n):
return build_figure(load_data())
app.layout = serve_layout
if __name__ == '__main__':
app.run_server(debug=True, host='0.0.0.0', port=8080)
| 29.689922
| 143
| 0.493473
|
16adb4b410dc40a6ef49912b8a2f533cfddda810
| 7,830
|
py
|
Python
|
SIVAE.py
|
mingzhang-yin/SIVI-Semi-implicit-Variational-Inference-
|
c457d09dddcfed94602c48e75ba6f2a3d159b3dc
|
[
"MIT"
] | 80
|
2018-05-29T04:29:25.000Z
|
2022-03-22T02:02:17.000Z
|
SIVAE.py
|
mingzhang-yin/SIVI-Semi-implicit-Variational-Inference-
|
c457d09dddcfed94602c48e75ba6f2a3d159b3dc
|
[
"MIT"
] | 1
|
2018-10-11T20:27:44.000Z
|
2018-10-13T18:00:48.000Z
|
SIVAE.py
|
mingzhang-yin/SIVI-Semi-implicit-Variational-Inference-
|
c457d09dddcfed94602c48e75ba6f2a3d159b3dc
|
[
"MIT"
] | 13
|
2018-05-28T22:35:40.000Z
|
2021-02-12T04:13:04.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import matplotlib
matplotlib.use('Agg')
import numpy as np
import os
import sys
import seaborn as sns
import scipy.spatial.distance
from matplotlib import pyplot as plt
import pandas as pd
import scipy.stats as stats
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
slim=tf.contrib.slim
Exponential=tf.contrib.distributions.Exponential(rate=1.0)
Normal=tf.contrib.distributions.Normal(loc=0., scale=1.)
Mvn=tf.contrib.distributions.MultivariateNormalDiag
Bernoulli = tf.contrib.distributions.Bernoulli
plt.ioff()
sys.path.append(os.getcwd())
#%%
def sample_psi(x,noise_dim,K,z_dim,reuse=False):
with tf.variable_scope("hyper_psi") as scope:
if reuse:
scope.reuse_variables()
x_0 = tf.expand_dims(x,axis=1)
x_1 = tf.tile(x_0,[1,K,1]) #N*K*784
B3 = Bernoulli(0.5)
e3 = tf.cast(B3.sample([tf.shape(x)[0],K,noise_dim[0]]),tf.float32)
input_ = tf.concat([e3,x_1],axis=2)
h3 = slim.stack(input_,slim.fully_connected,[500,500,noise_dim[0]])
B2 = Bernoulli(0.5)
e2 = tf.cast(B2.sample([tf.shape(x)[0],K,noise_dim[1]]),tf.float32)
input_1 = tf.concat([h3,e2,x_1],axis=2)
h2 = slim.stack(input_1,slim.fully_connected,[500,500,noise_dim[1]])
B1 = Bernoulli(0.5)
e1 = tf.cast(B1.sample([tf.shape(x)[0],K,noise_dim[2]]),tf.float32)
h1 = slim.stack(tf.concat([h2,e1,x_1],axis=2),slim.fully_connected,[500,500,500])
mu = tf.reshape(slim.fully_connected(h1,z_dim,activation_fn=None,scope='implicit_hyper_mu'),[-1,K,z_dim])
return mu
def sample_logv(x,noise_dim,z_dim,reuse=False):
with tf.variable_scope("hyper_sigma") as scope:
if reuse:
scope.reuse_variables()
net1 = slim.stack(x,slim.fully_connected,[500,500],scope='sigma')
z_logv = tf.reshape(slim.fully_connected(net1,z_dim,activation_fn=None,scope='sigma2'),[-1,z_dim])
return z_logv
def sample_n(psi,sigma):
eps = tf.random_normal(shape=tf.shape(psi))
z=psi+eps*sigma
return z
def decoder(z,x_dim,reuse=False):
with tf.variable_scope("decoder") as scope:
if reuse:
scope.reuse_variables()
net3 = slim.stack(z,slim.fully_connected,[500,500,500],scope='decoder_1')
logits_x = slim.fully_connected(net3,x_dim,activation_fn=None,scope='decoder_2')
return logits_x
#%%
tf.reset_default_graph()
z_dim = 64
noise_dim = [150,100,50]
x_dim = 784
eps = 1e-10
WU = tf.placeholder(tf.float32, shape=()) #warm-up
x = tf.placeholder(tf.float32,[None,x_dim])
J = tf.placeholder(tf.int32, shape=()) #estimate h
merge = tf.placeholder(tf.int32, shape=[])
K = tf.placeholder(tf.int32, shape=()) #iwae
z_logv = sample_logv(x,noise_dim,z_dim)
z_logv_iw = tf.tile(tf.expand_dims(z_logv,axis=1),[1,K,1])
sigma_iw1 = tf.exp(z_logv_iw/2)
sigma_iw2 = tf.cond(merge>0,lambda:tf.tile(tf.expand_dims(sigma_iw1,axis=2),[1,1,J+1,1]),
lambda:tf.tile(tf.expand_dims(sigma_iw1,axis=2),[1,1,J,1]))
psi_iw = sample_psi(x,noise_dim,K,z_dim)
z_sample_iw = sample_n(psi_iw,sigma_iw1)
z_sample_iw1 = tf.expand_dims(z_sample_iw,axis=2)
z_sample_iw2 = tf.cond(merge>0,lambda:tf.tile(z_sample_iw1,[1,1,J+1,1]),
lambda:tf.tile(z_sample_iw1,[1,1,J,1]))
psi_iw_star = sample_psi(x,noise_dim,J,z_dim,reuse=True)
psi_iw_star0 = tf.expand_dims(psi_iw_star,axis=1)
psi_iw_star1 = tf.tile(psi_iw_star0,[1,K,1,1])
psi_iw_star2 = tf.cond(merge>0,lambda:tf.concat([psi_iw_star1, tf.expand_dims(psi_iw,axis=2)],2),
lambda:psi_iw_star1)
ker = tf.exp(-0.5*tf.reduce_sum(tf.square(z_sample_iw2-psi_iw_star2)/tf.square(sigma_iw2+eps),3))
log_H_iw = tf.log(tf.reduce_mean(ker,axis=2))-0.5*tf.reduce_sum(z_logv_iw,2) #change to tf.reduce_logsumexp if there is NA
log_prior_iw = -0.5*tf.reduce_sum(tf.square(z_sample_iw),2)
x_iw = tf.tile(tf.expand_dims(x,axis=1),[1,K,1])
logits_x_iw = decoder(z_sample_iw,x_dim)
p_x_iw = Bernoulli(logits=logits_x_iw)
reconstruct_iw = p_x_iw.mean()
log_lik_iw = tf.reduce_sum( x_iw * tf.log(reconstruct_iw + eps)
+ (1-x_iw) * tf.log(1 - reconstruct_iw + eps),2)
loss_iw0 = -tf.reduce_logsumexp(log_lik_iw+(log_prior_iw-log_H_iw)*WU,1)+tf.log(tf.cast(K,tf.float32))
loss_iw = tf.reduce_mean(loss_iw0)
var_all = slim.get_model_variables()
lr=tf.constant(0.001)
g_step = tf.Variable(0, trainable=False)
train_op = tf.train.AdamOptimizer(learning_rate=lr).minimize(loss_iw,var_list=var_all,global_step=g_step)
init_op=tf.global_variables_initializer()
#%%
mnist = input_data.read_data_sets(os.getcwd()+'/MNIST', one_hot=True)
train_data=mnist.train
test_data=mnist.test
dat_train=[]
dat_test=[]
sess=tf.InteractiveSession()
sess.run(init_op)
print("This is SIVAE_MNIST test")
training_epochs=2000
batch_size = 200
total_points = mnist.train.num_examples
total_batch = int(total_points / batch_size)
display_step=1
total_test_batch = int(mnist.test.num_examples / batch_size)
J_value = 1
warm_up = 0
from time import sleep
for epoch in range(training_epochs):
avg_cost = 0.
avg_cost_test = 0.
np_lr = 0.001 * 0.75**(epoch/100)
warm_up = np.min([epoch/300,1])
if epoch<1900:
J_value = 1
else:
J_value = 50
for i in range(total_batch):
train_xs_0,_ = train_data.next_batch(batch_size)
train_xs = np.random.binomial(1,train_xs_0)
_ = sess.run([train_op],{x:train_xs,lr:np_lr,merge:1,J:J_value,K:1,WU:warm_up})
if epoch>1900:
for k in range(total_batch):
train_xs_0,_ = train_data.next_batch(batch_size)
train_xs = np.random.binomial(1,train_xs_0)
cost=sess.run(loss_iw,{x:train_xs,J:J_value,merge:1,K:1,WU:1.0})
avg_cost += cost / total_batch
for j in range(total_test_batch):
test_xs_0,_ = test_data.next_batch(batch_size)
test_xs = np.random.binomial(1,test_xs_0)
cost_test=sess.run(loss_iw,{x:test_xs,J:J_value,merge:1,K:1,WU:1.0})
avg_cost_test += cost_test / total_test_batch
dat_train.append([epoch,avg_cost])
dat_test.append([epoch,avg_cost_test])
if epoch % display_step == 0:
print("Epoch:", '%04d' % epoch,
"cost_train=", "{:.9f}".format(avg_cost),
"cost_test=", "{:.9f}".format(avg_cost_test))
avg_evi_test = 0
for j in range(total_test_batch):
test_xs_0,_ = test_data.next_batch(batch_size)
test_xs = np.random.binomial(1,test_xs_0)
evi_test=sess.run(loss_iw,{x:test_xs,J:J_value,merge:1,K:1000,WU:1.0})
avg_evi_test += evi_test / total_test_batch
L_1000 = avg_evi_test
print("&&&&&&&& The final test evidence is", L_1000)
if not os.path.exists('out/'):
os.makedirs('out/')
dat0 = np.array(dat_train)
dat1 = np.array(dat_test)
df0 = pd.DataFrame({'epoch':dat0[:,0],'train':dat0[:,1]})
df1 = pd.DataFrame({'epoch':dat1[:,0],'test':dat1[:,1]})
df = pd.concat([df0,df1], ignore_index=True, axis=1)
name_data1 = 'out/data_dim4_'+str(noise_dim)+'.csv'
df.to_csv(name_data1,index=False)
name_fig1 = 'out/slim_ELBO_dim4_'+str(noise_dim)+'.png'
if 1:
plt.figure()
dat0 = np.array(dat_train)
dat1 = np.array(dat_test)
plt.plot(dat0[:,0],dat0[:,1],'o-',label='train')
plt.plot(dat1[:,0],dat1[:,1],'o-',label='test')
plt.legend(fontsize = 'x-large')
plt.title("Training performance",fontsize = 'x-large')
plt.ylabel('nats',fontsize = 'x-large')
plt.xlabel('epoch',fontsize = 'x-large')
plt.savefig(name_fig1, bbox_inches='tight')
plt.close('all')
| 31.700405
| 122
| 0.673308
|
6bc5156345fc949358ea0409b7852ee508bcd3d3
| 1,228
|
py
|
Python
|
examples/maze/maze-4.py
|
predicatemike/predigame
|
096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f
|
[
"Apache-2.0"
] | null | null | null |
examples/maze/maze-4.py
|
predicatemike/predigame
|
096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f
|
[
"Apache-2.0"
] | null | null | null |
examples/maze/maze-4.py
|
predicatemike/predigame
|
096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f
|
[
"Apache-2.0"
] | null | null | null |
WIDTH = 30
HEIGHT = 18
TITLE = 'MAZE'
# a callback that keeps the player from running
# into walls. it's only acceptable to walk into
# an object marked as a "destination"
def evaluate(action, sprite, pos):
obj = at(pos)
if obj:
if obj.tag == 'destination':
return True
else:
return False
else:
return True
# create a sprite based on the "player" image
# position at the top left corner. control the
# sprite with the arrow keys while checking a
# precondition to make sure we don't walk into
# walls. the speed of the sprite enables "graceful"
# movement with the keyboard
p = image('player', (0, 0)).speed(5).keys(precondition=evaluate)
# center the player on the 0,0 grid cell
p.move_to((0, 0))
# load a maze from a file
maze('2', partial(image, 'stone'))
# a callback function for when the player reaches
# the green destination
def win(b, p):
text('YOU WIN', BLUE)
gameover()
# draw a green destination cell on the bottom right
d = shape(RECT, GREEN, (WIDTH-1, HEIGHT-1), tag='destination')
# if the player reaches this cell, execute the 'win' callback
d.collides(p, win)
# register the 'r' key for resetting the game
keydown('r', reset)
| 26.12766
| 64
| 0.675896
|
8b26d5cfde285a20fd04097031792962b1d79b30
| 5,604
|
py
|
Python
|
src/models/yolo/yolo_layers.py
|
Brechard/computer-vision-tf2
|
6ccd2a669058f0c106d9b26a6e2c674b235fed6d
|
[
"MIT"
] | 4
|
2019-11-26T19:01:57.000Z
|
2020-04-02T05:41:16.000Z
|
src/models/yolo/yolo_layers.py
|
Brechard/computer-vision-tf2
|
6ccd2a669058f0c106d9b26a6e2c674b235fed6d
|
[
"MIT"
] | null | null | null |
src/models/yolo/yolo_layers.py
|
Brechard/computer-vision-tf2
|
6ccd2a669058f0c106d9b26a6e2c674b235fed6d
|
[
"MIT"
] | 2
|
2020-02-27T12:27:33.000Z
|
2021-05-03T03:44:14.000Z
|
import tensorflow as tf
import tensorflow.keras.layers as layers
from tensorflow.keras.regularizers import l2
def concat_layers(n_filters: int, upsample_features: tf.Tensor,
skip_features: tf.Tensor) -> tf.keras.layers.Concatenate:
""" Layers to concatenate the features from bigger features that have to be upsampled and the features
from the scale we are detecting in this level """
x, x_skip = inputs = layers.Input(upsample_features.shape[1:]), layers.Input(skip_features.shape[1:])
x = conv_2d(x, n_filters, 1, 1)
x = layers.UpSampling2D(2)(x)
x = layers.Concatenate()([x, x_skip])
return tf.keras.Model(inputs, x, name="Concatenate_" + str(n_filters))((upsample_features, skip_features))
def conv_block(x, n_filters_list: list, kernel_size_list: list, n_blocks: int, strides_list: list = None):
""" Convolutional block, the features to reuse are not the output from the last convolutional layer
but the previous one """
assert len(n_filters_list) == len(kernel_size_list)
for block in range(n_blocks):
for conv in range(len(n_filters_list)):
stride = (1 if strides_list is None else strides_list[conv])
x = conv_2d(x, n_filters=n_filters_list[conv], kernel_size=kernel_size_list[conv], stride=stride)
if block == n_blocks - 1 and conv == len(n_filters_list) - 2:
x_features = x
return x, x_features
def conv_2d(x, n_filters: int, kernel_size: int, stride: int, max_pool: bool = False):
"""
Conv2D layer
:param x: input to the layer
:param n_filters: Number of filters for the layer
:param kernel_size: Size of the kernel
:param stride: Stride to apply
:param max_pool: Add a max pooling layer at the end, used in tiny yolo
:return: layer
"""
if stride == 1:
padding = 'same'
else:
x = layers.ZeroPadding2D(((1, 0), (1, 0)))(x) # top left half-padding
padding = 'valid'
# Using bias and Batch normalization makes no sense, therefore we turn it off
x = layers.Conv2D(n_filters, kernel_size, stride, padding, kernel_regularizer=l2(0.0005), use_bias=False)(x)
x = layers.BatchNormalization()(x)
x = layers.LeakyReLU(alpha=0.1)(x)
if max_pool:
x = layers.MaxPool2D(2, 2, 'same')(x)
return x
def darknet_residual_block(x, n_filters_list: list, kernel_sizes_list: list, n_blocks: int, strides_list: list = None):
"""
Darknet residual block consist of a chain of n residual blocks all the same
:param x: input of the block
:param n_filters_list: List with the number of filters for each conv layer
:param kernel_sizes_list: List with the sizes of the kernels for each conv layer
:param n_blocks: Number of blocks
:param strides_list: list with the stride for each conv layer
:return: output of the last residual block
"""
for _ in range(n_blocks):
x = residual_block(x, n_filters_list, kernel_sizes_list, strides_list)
return x
def last_layers(last_layers_input, n_filters_list, kernel_size_list, n_anchors, n_classes):
""" The last layers of the three scales are the same. A convolutional block that extracts features and a
convolutional layer. The last lambda layer reshapes the output so that each anchor mask prediction is
in a different row """
x = input = layers.Input(last_layers_input.shape[1:])
x, x_features = conv_block(x, n_filters_list=n_filters_list, kernel_size_list=kernel_size_list, n_blocks=3)
x = layers.Conv2D(n_anchors * (n_classes + 5), 1, 1, kernel_regularizer=l2(0.0005), use_bias=True)(x)
x = layers.Lambda(lambda x: tf.reshape(x, (-1, tf.shape(x)[1], tf.shape(x)[2], n_anchors, n_classes + 5)))(x)
return tf.keras.Model(input, (x, x_features), name="last_layers_" + str(n_filters_list[0]))(last_layers_input)
def last_tiny_layers(last_layers_input, n_filters, kernel_size, n_anchors, n_classes):
""" The last layers of tiny_yolo_v3 do not have convolutional blocks.
The last lambda layer reshapes the output so that each anchor mask prediction is
in a different row """
x = input = layers.Input(last_layers_input.shape[1:])
x = conv_2d(x, n_filters, kernel_size, 1)
x = layers.Conv2D(n_anchors * (n_classes + 5), 1, 1, kernel_regularizer=l2(0.0005), use_bias=True)(x)
x = layers.Lambda(lambda x: tf.reshape(x, (-1, tf.shape(x)[1], tf.shape(x)[2], n_anchors, n_classes + 5)))(x)
return tf.keras.Model(input, x, name="last_layers_" + str(n_filters))(last_layers_input)
def tiny_layer(x_big_features):
x = input = layers.Input(x_big_features.shape[1:])
x = conv_2d(x, n_filters=256, kernel_size=1, stride=1)
return tf.keras.Model(input, x, name='tiny_layer')(x_big_features)
def residual_block(x, n_filters_list: list, kernel_sizes_list: list, strides_list: list = None):
"""
Residual block of convolutional layers
:param x: input of the block
:param n_filters_list: List with the number of filters for each conv layer
:param kernel_sizes_list: List with the sizes of the kernels for each conv layer
:param strides_list: list with the stride for each conv layer
:return: output of the residual block
"""
assert len(n_filters_list) == len(kernel_sizes_list)
original_input = x
for conv_layer in range(len(n_filters_list)):
stride = (1 if strides_list is None else strides_list[conv_layer])
x = conv_2d(x, n_filters_list[conv_layer], kernel_sizes_list[conv_layer], stride)
x = layers.Add()([original_input, x])
return x
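# Minimal sketch of composing the helpers above into a small feature extractor
# (illustrative only; the input size and filter counts are arbitrary):
#
#   inputs = layers.Input((416, 416, 3))
#   x = conv_2d(inputs, n_filters=32, kernel_size=3, stride=1)
#   x = conv_2d(x, n_filters=64, kernel_size=3, stride=2)        # downsample
#   x = darknet_residual_block(x, [32, 64], [1, 3], n_blocks=1)  # output channels match input for the Add()
#   model = tf.keras.Model(inputs, x)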
| 48.730435
| 119
| 0.701642
|
da4246f8755c36017baa2425da0f755362539b29
| 468
|
py
|
Python
|
Python/check-if-word-is-valid-after-substitutions.py
|
black-shadows/LeetCode-Solutions
|
b1692583f7b710943ffb19b392b8bf64845b5d7a
|
[
"Fair",
"Unlicense"
] | null | null | null |
Python/check-if-word-is-valid-after-substitutions.py
|
black-shadows/LeetCode-Solutions
|
b1692583f7b710943ffb19b392b8bf64845b5d7a
|
[
"Fair",
"Unlicense"
] | null | null | null |
Python/check-if-word-is-valid-after-substitutions.py
|
black-shadows/LeetCode-Solutions
|
b1692583f7b710943ffb19b392b8bf64845b5d7a
|
[
"Fair",
"Unlicense"
] | null | null | null |
# Time: O(n)
# Space: O(n)
class Solution(object):
def isValid(self, S):
"""
:type S: str
:rtype: bool
"""
stack = []
for i in S:
if i == 'c':
if stack[-2:] == ['a', 'b']:
stack.pop()
stack.pop()
else:
return False
else:
stack.append(i)
return not stack
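# Illustrative checks (not part of the original solution):
#   Solution().isValid("aabcbc")   # True:  the stack reduces "aabcbc" to empty
#   Solution().isValid("abccba")   # False: the second 'c' has no "ab" on the stack before it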
| 22.285714
| 45
| 0.32265
|
af0639c7e26b5e80852d8598387a5f1af757d40d
| 250
|
py
|
Python
|
NLTK/raws.py
|
samuelwycliffe/Machine-Learning
|
2fa6632ed1cfc591711c6b463ea40f95c2c2ae50
|
[
"MIT"
] | null | null | null |
NLTK/raws.py
|
samuelwycliffe/Machine-Learning
|
2fa6632ed1cfc591711c6b463ea40f95c2c2ae50
|
[
"MIT"
] | null | null | null |
NLTK/raws.py
|
samuelwycliffe/Machine-Learning
|
2fa6632ed1cfc591711c6b463ea40f95c2c2ae50
|
[
"MIT"
] | null | null | null |
from nltk.tokenize import sent_tokenize, word_tokenize
EXAMPLE_TEXT = "Hello Mr. Smith, how are you doing today? The weather is great, and Python is awesome. The sky is pinkish-blue. You shouldn't eat cardboard."
print(sent_tokenize(EXAMPLE_TEXT))
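# word_tokenize is imported above but unused; an analogous illustrative call would be:
# print(word_tokenize(EXAMPLE_TEXT))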
| 41.666667
| 157
| 0.792
|
24f123af501a97e3e02bcda444aa2e8b1fb263d1
| 2,698
|
py
|
Python
|
games/xworld/tasks/XWorldLanObjectToDirection.py
|
emailweixu/XWorld
|
3c5f2180e2152f5d96dde1c1a27c96af07525a4c
|
[
"Apache-2.0"
] | null | null | null |
games/xworld/tasks/XWorldLanObjectToDirection.py
|
emailweixu/XWorld
|
3c5f2180e2152f5d96dde1c1a27c96af07525a4c
|
[
"Apache-2.0"
] | null | null | null |
games/xworld/tasks/XWorldLanObjectToDirection.py
|
emailweixu/XWorld
|
3c5f2180e2152f5d96dde1c1a27c96af07525a4c
|
[
"Apache-2.0"
] | null | null | null |
import random
from xworld_task import XWorldTask
class XWorldLanObjectToDirection(XWorldTask):
def __init__(self, env):
super(XWorldLanObjectToDirection, self).__init__(env)
def idle(self):
"""
Start a task
"""
agent, _, _ = self._get_agent()
goals = self._get_goals()
## delete all the things first
for g in goals:
self._delete_entity(g)
self._delete_entity(agent)
self._set_property(agent, {"loc" : (2, 2)})
self._set_entity_inst(agent)
g_locs = [(3, 2), (1, 2), (2, 1), (2, 3)]
for i, g in enumerate(goals):
self._set_property(g, {"loc": g_locs[i]})
self._set_entity_inst(g)
assert len(goals) > 0, "there is no goal on the map!"
sel_goal = random.choice(goals)
direction = self._get_direction(agent.loc, sel_goal.loc)
## first generate all candidate answers
self._bind("S -> answer")
self._bind("G -> '%s'" % sel_goal.name)
self._bind("D -> '%s'" % direction)
self.answers = self._generate_all()
## then generate the question
self._bind("S -> question")
self._bind("G -> '%s'" % sel_goal.name)
return ["reward", 0.0, self._generate()]
def reward(self):
"""
Giving reward to the agent
"""
_, agent_sent, _ = self._get_agent()
self._set_production_rule(
"R -> " + " ".join(["'" + w + "'" for w in random.choice(self.answers).split()]))
self._bind("S -> reply")
if agent_sent in self.answers:
self._bind("Y -> 'Yes'")
reward = 1.0
self._record_success()
self._record_event("correct_reply", next=True)
else:
self._bind("Y -> 'No'")
reward = -1.0
self._record_failure()
self._record_event("wrong_reply", next=True)
return ["conversation_wrapup", reward, self._generate()]
def get_stage_names(self):
"""
return all the stage names; does not have to be in order
"""
return ["idle", "reward", "conversation_wrapup"]
def _define_grammar(self):
all_goal_names = self._get_all_goal_names_as_rhs()
all_directions = self._get_all_directions_as_rhs()
grammar_str = """
S -> question | answer | reply
question -> 'Where' 'is' G '?'
answer -> A1 | A2
reply -> R | Y R
A1 -> 'On' 'the' D 'is' G
A2 -> G 'is' 'on' 'the' D
D --> %s
G --> %s
Y --> 'Yes' | 'No'
""" % (all_directions, all_goal_names)
return grammar_str, "S"
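    # Illustration (not part of the original file): with the grammar above and
    # the bindings made in idle()/reward(), the "question" symbol expands to
    # strings like "Where is <goal> ?", and the "answer" symbol to either
    # "On the <direction> is <goal>" (rule A1) or
    # "<goal> is on the <direction>" (rule A2), where the goal name and
    # direction come from the goal selected in idle().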
| 32.119048
| 93
| 0.5404
|
59d206dff7612af87b03dafe4125b6fa7376b907
| 20,185
|
py
|
Python
|
lib/googlecloudsdk/third_party/apis/clouderrorreporting/v1beta1/clouderrorreporting_v1beta1_messages.py
|
bopopescu/SDK
|
e6d9aaee2456f706d1d86e8ec2a41d146e33550d
|
[
"Apache-2.0"
] | null | null | null |
lib/googlecloudsdk/third_party/apis/clouderrorreporting/v1beta1/clouderrorreporting_v1beta1_messages.py
|
bopopescu/SDK
|
e6d9aaee2456f706d1d86e8ec2a41d146e33550d
|
[
"Apache-2.0"
] | null | null | null |
lib/googlecloudsdk/third_party/apis/clouderrorreporting/v1beta1/clouderrorreporting_v1beta1_messages.py
|
bopopescu/SDK
|
e6d9aaee2456f706d1d86e8ec2a41d146e33550d
|
[
"Apache-2.0"
] | 1
|
2020-07-24T21:52:25.000Z
|
2020-07-24T21:52:25.000Z
|
"""Generated message classes for clouderrorreporting version v1beta1.
Google Stackdriver Error Reporting groups and counts similar errors from cloud
services. The Google Stackdriver Error Reporting API provides read access to
error groups and their associated errors.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from googlecloudsdk.third_party.apitools.base.protorpclite import messages as _messages
from googlecloudsdk.third_party.apitools.base.py import encoding
package = 'clouderrorreporting'
class ClouderrorreportingProjectsDeleteEventsRequest(_messages.Message):
"""A ClouderrorreportingProjectsDeleteEventsRequest object.
Fields:
projectName: The resource name of the Google Cloud Platform project.
Required. Example: `projects/my-project`.
"""
projectName = _messages.StringField(1, required=True)
class ClouderrorreportingProjectsEventsListRequest(_messages.Message):
"""A ClouderrorreportingProjectsEventsListRequest object.
Enums:
TimeRangePeriodValueValuesEnum: Restricts the query to the specified time
range.
Fields:
groupId: The group for which events shall be returned. Required.
pageSize: The maximum number of results to return per response.
pageToken: A `next_page_token` provided by a previous response.
projectName: The resource name of the Google Cloud Platform project.
Required. Example: projects/my-project
serviceFilter_service: The exact value to match against
[`ServiceContext.service`](/error-
reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service).
serviceFilter_version: The exact value to match against
[`ServiceContext.version`](/error-
reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version).
timeRange_period: Restricts the query to the specified time range.
"""
class TimeRangePeriodValueValuesEnum(_messages.Enum):
"""Restricts the query to the specified time range.
Values:
PERIOD_UNSPECIFIED: <no description>
PERIOD_1_HOUR: <no description>
PERIOD_6_HOURS: <no description>
PERIOD_1_DAY: <no description>
PERIOD_1_WEEK: <no description>
PERIOD_30_DAYS: <no description>
"""
PERIOD_UNSPECIFIED = 0
PERIOD_1_HOUR = 1
PERIOD_6_HOURS = 2
PERIOD_1_DAY = 3
PERIOD_1_WEEK = 4
PERIOD_30_DAYS = 5
groupId = _messages.StringField(1)
pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(3)
projectName = _messages.StringField(4, required=True)
serviceFilter_service = _messages.StringField(5)
serviceFilter_version = _messages.StringField(6)
timeRange_period = _messages.EnumField('TimeRangePeriodValueValuesEnum', 7)
class ClouderrorreportingProjectsGroupStatsListRequest(_messages.Message):
"""A ClouderrorreportingProjectsGroupStatsListRequest object.
Enums:
AlignmentValueValuesEnum: The alignment of the timed counts to be
returned. Default is `ALIGNMENT_EQUAL_AT_END`.
OrderValueValuesEnum: The sort order in which the results are returned.
Default is `COUNT_DESC`.
TimeRangePeriodValueValuesEnum: Restricts the query to the specified time
range.
Fields:
alignment: The alignment of the timed counts to be returned. Default is
`ALIGNMENT_EQUAL_AT_END`.
alignmentTime: Time where the timed counts shall be aligned if rounded
alignment is chosen. Default is 00:00 UTC.
groupId: List all `ErrorGroupStats` with these IDs. If not specified, all
error group stats with a non-zero error count for the given selection
criteria are returned.
order: The sort order in which the results are returned. Default is
`COUNT_DESC`.
pageSize: The maximum number of results to return per response. Default is
20.
pageToken: A `next_page_token` provided by a previous response. To view
additional results, pass this token along with the identical query
parameters as the first request.
projectName: The resource name of the Google Cloud Platform project.
Written as `projects/` plus the [Google Cloud Platform project
ID](https://support.google.com/cloud/answer/6158840). Required. Example:
`projects/my-project-123`.
serviceFilter_service: The exact value to match against
[`ServiceContext.service`](/error-
reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service).
serviceFilter_version: The exact value to match against
[`ServiceContext.version`](/error-
reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version).
timeRange_period: Restricts the query to the specified time range.
timedCountDuration: The preferred duration for a single returned
`TimedCount`. If not set, no timed counts are returned.
"""
class AlignmentValueValuesEnum(_messages.Enum):
"""The alignment of the timed counts to be returned. Default is
`ALIGNMENT_EQUAL_AT_END`.
Values:
ERROR_COUNT_ALIGNMENT_UNSPECIFIED: <no description>
ALIGNMENT_EQUAL_ROUNDED: <no description>
ALIGNMENT_EQUAL_AT_END: <no description>
"""
ERROR_COUNT_ALIGNMENT_UNSPECIFIED = 0
ALIGNMENT_EQUAL_ROUNDED = 1
ALIGNMENT_EQUAL_AT_END = 2
class OrderValueValuesEnum(_messages.Enum):
"""The sort order in which the results are returned. Default is
`COUNT_DESC`.
Values:
GROUP_ORDER_UNSPECIFIED: <no description>
COUNT_DESC: <no description>
LAST_SEEN_DESC: <no description>
CREATED_DESC: <no description>
AFFECTED_USERS_DESC: <no description>
"""
GROUP_ORDER_UNSPECIFIED = 0
COUNT_DESC = 1
LAST_SEEN_DESC = 2
CREATED_DESC = 3
AFFECTED_USERS_DESC = 4
class TimeRangePeriodValueValuesEnum(_messages.Enum):
"""Restricts the query to the specified time range.
Values:
PERIOD_UNSPECIFIED: <no description>
PERIOD_1_HOUR: <no description>
PERIOD_6_HOURS: <no description>
PERIOD_1_DAY: <no description>
PERIOD_1_WEEK: <no description>
PERIOD_30_DAYS: <no description>
"""
PERIOD_UNSPECIFIED = 0
PERIOD_1_HOUR = 1
PERIOD_6_HOURS = 2
PERIOD_1_DAY = 3
PERIOD_1_WEEK = 4
PERIOD_30_DAYS = 5
alignment = _messages.EnumField('AlignmentValueValuesEnum', 1)
alignmentTime = _messages.StringField(2)
groupId = _messages.StringField(3, repeated=True)
order = _messages.EnumField('OrderValueValuesEnum', 4)
pageSize = _messages.IntegerField(5, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(6)
projectName = _messages.StringField(7, required=True)
serviceFilter_service = _messages.StringField(8)
serviceFilter_version = _messages.StringField(9)
timeRange_period = _messages.EnumField('TimeRangePeriodValueValuesEnum', 10)
timedCountDuration = _messages.StringField(11)
class ClouderrorreportingProjectsGroupsGetRequest(_messages.Message):
"""A ClouderrorreportingProjectsGroupsGetRequest object.
Fields:
groupName: Group resource name. Required. Example: `projects/my-
project-123/groups/my-group`
"""
groupName = _messages.StringField(1, required=True)
class DeleteEventsResponse(_messages.Message):
"""Response message for deleting error events."""
class ErrorContext(_messages.Message):
"""A description of the context in which an error occurred. This data should
be provided by the application when reporting an error, unless the error
report has been generated automatically from Google App Engine logs. All
fields are optional.
Fields:
httpRequest: The HTTP request which was processed when the error was
triggered.
reportLocation: The location in the source code where the decision was
made to report the error, usually the place where it was logged. For a
logged exception this would be the source line where the exception is
logged, usually close to the place where it was caught. This value is in
contrast to `Exception.cause_location`, which describes the source line
where the exception was thrown.
user: The user who caused or was affected by the crash. This can be a user
ID, an email address, or an arbitrary token that uniquely identifies the
user. When sending an error report, leave this field empty if the user
was not logged in. In this case the Error Reporting system will use
other data, such as remote IP address, to distinguish affected users.
See `affected_users_count` in `ErrorGroupStats`.
"""
httpRequest = _messages.MessageField('HttpRequestContext', 1)
reportLocation = _messages.MessageField('SourceLocation', 2)
user = _messages.StringField(3)
class ErrorEvent(_messages.Message):
"""An error event which is returned by the Error Reporting system.
Fields:
context: Data about the context in which the error occurred.
eventTime: Time when the event occurred as provided in the error report.
If the report did not contain a timestamp, the time the error was
received by the Error Reporting system is used.
message: The stack trace that was reported or logged by the service.
serviceContext: The service_context for which this error was reported.
"""
context = _messages.MessageField('ErrorContext', 1)
eventTime = _messages.StringField(2)
message = _messages.StringField(3)
serviceContext = _messages.MessageField('ServiceContext', 4)
class ErrorGroup(_messages.Message):
"""Description of a group of similar error events.
Fields:
groupId: Group IDs are unique for a given project. If the same kind of
error occurs in different service contexts, it will receive the same
group ID.
name: Group resource name. Example: `projects/my-project-123/groups/my-
groupid`
trackingIssues: Associated tracking issues.
"""
groupId = _messages.StringField(1)
name = _messages.StringField(2)
trackingIssues = _messages.MessageField('TrackingIssue', 3, repeated=True)
class ErrorGroupStats(_messages.Message):
"""Data extracted for a specific group based on certain selection criteria,
such as a given time period and/or service filter.
Fields:
affectedServices: Service contexts with a non-zero error count for the
given selection criteria. This list can be truncated if multiple
services are affected. Refer to `num_affected_services` for the total
count.
affectedUsersCount: Approximate number of affected users in the given
group that match the selection criteria. Users are distinguished by data
in the `ErrorContext` of the individual error events, such as their
login name or their remote IP address in case of HTTP requests. The
number of affected users can be zero even if the number of errors is
non-zero if no data was provided from which the affected user could be
deduced. Users are counted based on data in the request context that was
provided in the error report. If more users are implicitly affected,
such as due to a crash of the whole service, this is not reflected here.
count: Approximate total number of events in the given group that match
the selection criteria.
firstSeenTime: Approximate first occurrence that was seen for this group
and which matches the given selection criteria.
group: Group data that is independent of the selection criteria.
lastSeenTime: Approximate last occurrence that was seen for this group and
which matches the given selection criteria.
numAffectedServices: The total number of services with a non-zero error
count for the given selection criteria.
representative: An arbitrary event that is chosen as representative for
the whole group. The representative event is intended to be used as a
quick preview for the whole group. Events in the group are usually
sufficiently similar to each other such that showing an arbitrary
representative provides insight into the characteristics of the group as
a whole.
timedCounts: Approximate number of occurrences over time. Timed counts
returned by ListGroups are guaranteed to be: - Inside the requested
time interval - Non-overlapping, and - Ordered by ascending time.
"""
affectedServices = _messages.MessageField('ServiceContext', 1, repeated=True)
affectedUsersCount = _messages.IntegerField(2)
count = _messages.IntegerField(3)
firstSeenTime = _messages.StringField(4)
group = _messages.MessageField('ErrorGroup', 5)
lastSeenTime = _messages.StringField(6)
numAffectedServices = _messages.IntegerField(7, variant=_messages.Variant.INT32)
representative = _messages.MessageField('ErrorEvent', 8)
timedCounts = _messages.MessageField('TimedCount', 9, repeated=True)
class HttpRequestContext(_messages.Message):
"""HTTP request data that is related to a reported error. This data should
be provided by the application when reporting an error, unless the error
report has been generated automatically from Google App Engine logs. All
fields are optional.
Fields:
method: The type of HTTP request, such as `GET`, `POST`, etc.
referrer: The referrer information that is provided with the request.
remoteIp: The IP address from which the request originated. This can be
IPv4, IPv6, or a token which is derived from the IP address, depending
on the data that has been provided in the error report.
responseStatusCode: The HTTP response status code for the request.
url: The URL of the request.
userAgent: The user agent information that is provided with the request.
"""
method = _messages.StringField(1)
referrer = _messages.StringField(2)
remoteIp = _messages.StringField(3)
responseStatusCode = _messages.IntegerField(4, variant=_messages.Variant.INT32)
url = _messages.StringField(5)
userAgent = _messages.StringField(6)
class ListEventsResponse(_messages.Message):
"""Contains a set of requested error events.
Fields:
errorEvents: The error events which match the given request.
nextPageToken: If non-empty, more results are available. Pass this token,
along with the same query parameters as the first request, to view the
next page of results.
"""
errorEvents = _messages.MessageField('ErrorEvent', 1, repeated=True)
nextPageToken = _messages.StringField(2)
class ListGroupStatsResponse(_messages.Message):
"""Contains a set of requested error group stats.
Fields:
errorGroupStats: The error group stats which match the given request.
nextPageToken: If non-empty, more results are available. Pass this token,
along with the same query parameters as the first request, to view the
next page of results.
"""
errorGroupStats = _messages.MessageField('ErrorGroupStats', 1, repeated=True)
nextPageToken = _messages.StringField(2)
class ServiceContext(_messages.Message):
"""Describes a running service that sends errors. Its version changes over
time and multiple versions can run in parallel.
Fields:
service: An identifier of the service, such as the name of the executable,
job, or Google App Engine module name. This field is expected to have a
low number of values that are relatively stable over time, as opposed to
`version`, which can be changed whenever new code is deployed. Contains
the module name for error reports extracted from Google App Engine logs
or `default` if the App Engine default module is used.
version: Represents the source code version that the developer provided,
which could represent a version label or a Git SHA-1 hash, for example.
"""
service = _messages.StringField(1)
version = _messages.StringField(2)
class SourceLocation(_messages.Message):
"""Indicates a location in the source code of the service for which errors
are reported. This data should be provided by the application when reporting
an error, unless the error report has been generated automatically from
Google App Engine logs. All fields are optional.
Fields:
filePath: The source code filename, which can include a truncated relative
path, or a full path from a production machine.
functionName: Human-readable name of a function or method. The value can
include optional context like the class or package name. For example,
`my.package.MyClass.method` in case of Java.
lineNumber: 1-based. 0 indicates that the line number is unknown.
"""
filePath = _messages.StringField(1)
functionName = _messages.StringField(2)
lineNumber = _messages.IntegerField(3, variant=_messages.Variant.INT32)
class StandardQueryParameters(_messages.Message):
"""Query parameters accepted by all methods.
Enums:
FXgafvValueValuesEnum: V1 error format.
AltValueValuesEnum: Data format for response.
Fields:
f__xgafv: V1 error format.
access_token: OAuth access token.
alt: Data format for response.
bearer_token: OAuth bearer token.
callback: JSONP
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
pp: Pretty-print response.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters.
trace: A tracing token of the form "token:<tokenid>" to include in api
requests.
uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
"""
class AltValueValuesEnum(_messages.Enum):
"""Data format for response.
Values:
json: Responses with Content-Type of application/json
media: Media download with context-dependent Content-Type
proto: Responses with Content-Type of application/x-protobuf
"""
json = 0
media = 1
proto = 2
class FXgafvValueValuesEnum(_messages.Enum):
"""V1 error format.
Values:
_1: v1 error format
_2: v2 error format
"""
_1 = 0
_2 = 1
f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
access_token = _messages.StringField(2)
alt = _messages.EnumField('AltValueValuesEnum', 3, default=u'json')
bearer_token = _messages.StringField(4)
callback = _messages.StringField(5)
fields = _messages.StringField(6)
key = _messages.StringField(7)
oauth_token = _messages.StringField(8)
pp = _messages.BooleanField(9, default=True)
prettyPrint = _messages.BooleanField(10, default=True)
quotaUser = _messages.StringField(11)
trace = _messages.StringField(12)
uploadType = _messages.StringField(13)
upload_protocol = _messages.StringField(14)
class TimedCount(_messages.Message):
"""The number of errors in a given time period. All numbers are approximate
since the error events are sampled before counting them.
Fields:
count: Approximate number of occurrences in the given time period.
endTime: End of the time period to which `count` refers (excluded).
startTime: Start of the time period to which `count` refers (included).
"""
count = _messages.IntegerField(1)
endTime = _messages.StringField(2)
startTime = _messages.StringField(3)
class TrackingIssue(_messages.Message):
"""Information related to tracking the progress on resolving the error.
Fields:
url: A URL pointing to a related entry in an issue tracking system.
Example: https://github.com/user/project/issues/4
"""
url = _messages.StringField(1)
encoding.AddCustomJsonFieldMapping(
StandardQueryParameters, 'f__xgafv', '$.xgafv',
package=u'clouderrorreporting')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1',
package=u'clouderrorreporting')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2',
package=u'clouderrorreporting')
| 40.450902
| 87
| 0.748229
|
d3ee5951809f8a2bf4e1a39418e037ea297c315d
| 5,188
|
py
|
Python
|
train.py
|
baoyujing/multi_gras
|
1bc9b72bee786f5a439c1119a86b6ebaa19bf9f9
|
[
"MIT"
] | null | null | null |
train.py
|
baoyujing/multi_gras
|
1bc9b72bee786f5a439c1119a86b6ebaa19bf9f9
|
[
"MIT"
] | null | null | null |
train.py
|
baoyujing/multi_gras
|
1bc9b72bee786f5a439c1119a86b6ebaa19bf9f9
|
[
"MIT"
] | null | null | null |
import os
import yaml
import time
import json
import torch
import logging
import argparse
from tqdm import tqdm
from modules.multi_gras import MultiGraS
from data_management.vocabulary import Vocabulary
from data_management.data_loader import DataLoader
from evaluate import Evaluator
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class Trainer:
def __init__(self, configs_path, model, loader_train, loader_val):
self.configs = self.default_configs()
configs = yaml.safe_load(open(configs_path))
self.configs.update(configs)
self.model = model
if self.configs["cuda"]:
self.model = model.cuda()
if self.configs["pretrained_path"] is not None:
self.model.load_state_dict(torch.load(self.configs["pretrained_path"]))
self.loader_train = loader_train
self.loader_val = loader_val
self.optimizer = torch.optim.Adam(model.parameters(), lr=float(self.configs["lr"]))
self.criterion = torch.nn.CrossEntropyLoss(reduction="none")
self.save_root = self.configs["save_root"]
if not os.path.exists(self.save_root):
os.makedirs(self.save_root)
def train(self):
start_time = time.time()
cnt_batch = 0
f = open("results_eval.txt", "w")
f2 = open("results_loss.txt", "w")
for epoch in range(self.configs["n_epoch"]):
logger.info("Epoch: {}".format(epoch))
self.model.train()
epoch_loss = 0.0
for i, pack in enumerate(tqdm(self.loader_train)):
start_time = time.time()
self.optimizer.zero_grad()
if self.configs["cuda"]:
pack["document"] = pack["document"].cuda()
pack["graphs"] = pack["graphs"].cuda()
pack["graphs_sent"] = [adj.cuda() for adj in pack["graphs_sent"]]
outputs = self.model(doc_input=pack["document"],
sent_len_list=pack["sent_len_list"],
adjs=pack["graphs"],
n_sent_list=pack["n_sent"],
adjs_sents=pack["graphs_sent"])
labels = self.get_labels(pack["oracle_summary"], pack["n_sent"])
loss = self.criterion(outputs, labels)
loss = loss.mean()
loss.backward()
self.optimizer.step()
batch_loss = loss.detach().cpu().numpy()
epoch_loss += batch_loss
torch.cuda.empty_cache()
cnt_batch += 1
logger.info("Epoch: {}, loss: {:.2f}, time elapsed: {:.2f}s.".
format(epoch, epoch_loss/len(self.loader_train), time.time() - start_time))
f2.writelines(str(epoch_loss))
f2.writelines("\n")
f2.flush()
# save model
path = os.path.join(self.save_root, "{}.ckpt".format(epoch))
torch.save(self.model.state_dict(), path)
# evaluate
scores = self.eval()
f.writelines(json.dumps(scores))
f.writelines("\n")
f.flush()
def get_labels(self, oracle_summary, len_list):
"""
oracle_summary: [oracles]
:return labels: [n] 0 or 1
"""
labels = []
        for i, indices in enumerate(oracle_summary):
            label = torch.zeros(len_list[i], dtype=torch.long)
            label[indices] = 1
            labels.append(label)
labels = torch.cat(labels)
if self.configs["cuda"]:
return labels.to("cuda")
return labels
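    # Worked example (illustrative): with oracle_summary=[[0, 2], [1]] and
    # len_list=[4, 3], get_labels returns tensor([1, 0, 1, 0, 0, 1, 0]),
    # i.e. a concatenated 0/1 label per sentence across the batch.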
def eval(self):
evaluator = Evaluator(model=self.model, data_loader=self.loader_val, cuda=self.configs["cuda"],
cnn_eval_path=self.configs["cnn_eval_path"])
scores_all = evaluator.eval()
return scores_all
@staticmethod
def default_configs():
return {
"lr": 5e-4,
"batch_size": 32,
"n_epoch": 10,
"save_root": "./ckpts",
"pretrained_path": None,
"cuda": True,
"cnn_split_path": None
}
parser = argparse.ArgumentParser()
parser.add_argument("--config_trainer", type=str, default="./configs/trainer.yml")
parser.add_argument("--config_model", type=str, default="./configs/model.yml")
parser.add_argument("--config_dataloader", type=str, default="./configs/dataloader.yml")
parser.add_argument("--config_vocabulary", type=str, default="./configs/vocabulary.yml")
args = parser.parse_args()
vocabulary = Vocabulary(configs_path=args.config_vocabulary)
model = MultiGraS(configs_path=args.config_model, vocabulary=vocabulary)
loader_train = DataLoader(config_path=args.config_dataloader, vocabulary=vocabulary, split="train")
loader_val = DataLoader(config_path=args.config_dataloader, vocabulary=vocabulary, split="val")
trainer = Trainer(configs_path=args.config_trainer, model=model, loader_train=loader_train, loader_val=loader_val)
trainer.train()
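# Typical invocation under the default argparse values above (illustrative;
# the YAML config files are assumed to exist at the listed paths):
#   python train.py --config_trainer ./configs/trainer.yml \
#                   --config_model ./configs/model.yml \
#                   --config_dataloader ./configs/dataloader.yml \
#                   --config_vocabulary ./configs/vocabulary.yml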
| 36.027778
| 114
| 0.595027
|
5cb42fef59f4f2c234cd6fc8bc17bb516f5721b9
| 604
|
py
|
Python
|
blender/arm/logicnode/math/LN_separate_rgb.py
|
Lykdraft/armory
|
da1cf33930ce9a8b1865d35c128fe4842bef2933
|
[
"Zlib"
] | null | null | null |
blender/arm/logicnode/math/LN_separate_rgb.py
|
Lykdraft/armory
|
da1cf33930ce9a8b1865d35c128fe4842bef2933
|
[
"Zlib"
] | null | null | null |
blender/arm/logicnode/math/LN_separate_rgb.py
|
Lykdraft/armory
|
da1cf33930ce9a8b1865d35c128fe4842bef2933
|
[
"Zlib"
] | null | null | null |
from arm.logicnode.arm_nodes import *
class SeparateColorNode(ArmLogicTreeNode):
"""Use to split a color into RGB values."""
bl_idname = 'LNSeparateColorNode'
bl_label = 'Separate RGB'
arm_version = 1
def init(self, context):
super(SeparateColorNode, self).init(context)
self.add_input('NodeSocketColor', 'Color', default_value=[1.0, 1.0, 1.0, 1.0])
self.add_output('NodeSocketFloat', 'R')
self.add_output('NodeSocketFloat', 'G')
self.add_output('NodeSocketFloat', 'B')
add_node(SeparateColorNode, category=PKG_AS_CATEGORY, section='color')
| 35.529412
| 86
| 0.688742
|
20ab7bc6ec02b4f298dbe73f357afa93b60f4b95
| 21,205
|
py
|
Python
|
model_compound_scaling/main_grid_search.py
|
d-becking/neurips-2019-micronet-challenge
|
157fa47e979f684f682f415b10dce1e3aa1b457b
|
[
"MIT"
] | 6
|
2019-12-13T22:47:17.000Z
|
2021-01-22T11:05:34.000Z
|
model_compound_scaling/main_grid_search.py
|
d-becking/neurips-2019-micronet-challenge
|
157fa47e979f684f682f415b10dce1e3aa1b457b
|
[
"MIT"
] | 12
|
2019-10-31T23:30:05.000Z
|
2022-01-13T01:43:15.000Z
|
model_compound_scaling/main_grid_search.py
|
d-becking/neurips-2019-micronet-challenge
|
157fa47e979f684f682f415b10dce1e3aa1b457b
|
[
"MIT"
] | 1
|
2021-03-30T08:58:04.000Z
|
2021-03-30T08:58:04.000Z
|
'''
References:
https://github.com/TropComplique/trained-ternary-quantization
MIT License - Copyright (c) 2017 Dan Antoshchenko
https://github.com/uoguelph-mlrg/Cutout
Educational Community License, Version 2.0 (ECL-2.0) - Copyright (c) 2019 Vithursan Thangarasa
https://github.com/lukemelas/EfficientNet-PyTorch
Apache License, Version 2.0 - Copyright (c) 2019 Luke Melas-Kyriazi
https://github.com/akamaster/pytorch_resnet_cifar10
Yerlan Idelbayev's ResNet implementation for CIFAR10/CIFAR100 in PyTorch
This is the main script for running a grid search over the depth and width scaling factors of the MicroNet
architectures, which can be found in ./model/model.
This document contains the following functions/classes:
- main()
- Cutout(object)
- grid_search(train_loader, val_loader, criterion, alpha, beta)
- train(train_loader, model, criterion, optimizer, epoch)
- evaluate(model, loss, val_iterator)
'''
import argparse
import time
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
from torchsummary import summary
import numpy as np
import os
import PIL
from model import micronet, image_micronet, EfficientNet, best_cifar_micronet
parser = argparse.ArgumentParser(description='Grid search for model compound scaling')
parser.add_argument('--workers', default=4, type=int, metavar='4',
help='Number of data loading workers (default: 4 per GPU)')
parser.add_argument('--epochs', default=200, type=int, metavar=200,
help='Number of epochs to run (default: 200)')
parser.add_argument('--start-epoch', default=0, type=int, metavar=0,
help='Start epoch, e.g. for restarts (default: 0)')
parser.add_argument('--batch-size', type=int, default=256, metavar=256,
help='Batch size for training (default=256)')
parser.add_argument('--val-batch-size', type=int, default=512, metavar=512,
help='Batch size for validation (default=512)')
parser.add_argument('--lr', default=0.1, type=float,
metavar=0.1, help='Initial learning rate (default 0.1)')
parser.add_argument('--momentum', default=0.9, type=float, metavar=0.9,
help='Momentum for SGD (default 0.9)')
parser.add_argument('--weight-decay', default=5e-4, type=float,
metavar=5e-4, help='Weight decay (default: 5e-4)')
parser.add_argument('--grid', type=float, default=(1.4, 1.2), nargs='+', metavar='1.4 1.2 1.2 1.3 ...',
help='Grid of alternating depth and width multipliers d and w (e.g.: d0, w0, d1, w1, ..., '
'dn, wn for n tuples)')
parser.add_argument('--phi', type=float, default=3.5, metavar=3.5,
help='Phi is the exponential scaling factor for width and depth multipliers (default: 3.5)')
parser.add_argument('--cuda', default=True, action='store_true',
help='By default CUDA training on GPU is enabled')
parser.add_argument('--no-cuda', dest='cuda', action='store_false',
help='Disables CUDA training and maps data plus model to CPU')
parser.add_argument('--dataset', default='CIFAR100', type=str, metavar='cifar',
help='Dataset to use. Choose from [CIFAR100, ImageNet] (default: CIFAR100)')
parser.add_argument('--image-size', default=32, type=int, metavar=32,
help='Input image size. Choose from [32 for CIFAR, 128-600 for ImageNet] (default: 32)')
parser.add_argument('--data-path', default='../data', type=str, metavar='/path',
help='Path to ImageNet data. CIFAR data will be downloaded to "../data" directory automatically '
'if the data-path argument is ignored')
parser.add_argument('--resume', default='./model_compound_scaling/saved_models/checkpoint.pt', type=str,
metavar='/path_cp',
help='Path to latest checkpoint. If it exists the train procedure will be resumed'
'(default: ./model_compound_scaling/saved_models/checkpoint.pt)')
def train(train_loader, model, criterion, optimizer, epoch):
"""
Training procedure.
Parameters:
-----------
train_loader:
PyTorch Dataloader for given train dataset.
model:
The neural network model.
criterion:
Loss function to use (e.g. cross entropy, MSE, ...).
optimizer:
Updating model parameters with Gradient Descent plus Nesterov momentum and weight decay.
epoch:
Current training epoch.
Returns:
--------
running loss
running accuracy
"""
# Resetting variables for next epoch
running_loss = 0.0
running_accuracy = 0.0
n_steps = 0
# Iterating over training dataloader
for input, target in train_loader:
# Measure data loading time
end = time.time()
# Resetting variables for calculating current batch accuracy
correct = 0
total = 0
# Map data to GPU if available
if use_cuda:
input, target = input.cuda(), target.cuda(non_blocking=True)
# Forward pass
output = model(input)
        # Benchmark mode can allocate large memory blocks during the very first forward passes while it tests algorithms, so the cache is cleared during epoch 0
if epoch == 0:
torch.cuda.empty_cache()
# Compute batch_loss
batch_loss = criterion(output, target)
# Compute accuracy
_, predicted = output.max(1)
total += target.size(0)
correct += predicted.eq(target).sum().item()
batch_accuracy = 100. * correct / total
# Compute gradient and do optimization step
optimizer.zero_grad()
batch_loss.backward()
optimizer.step()
# Summing up batch losses and accuracies over each step
running_loss += batch_loss.float()
running_accuracy += batch_accuracy
n_steps += 1
# Printing preliminary results
if n_steps % 50 == 0:
total_steps = int(len(train_loader.dataset) / train_loader.batch_size) + 1
print('Epoch: {}, step: {}/{}, running_loss: {:.4f}, batch_acc: {:.4f}, running_acc: {:.4f}, '
'elapsed time: {:.2f} s, max_mem_alloc: {:.2f} GB, max_mem_cache {:.2f} GB'.format(
epoch, n_steps, total_steps, running_loss / n_steps, batch_accuracy, running_accuracy / n_steps,
(time.time() - end) * 100, torch.cuda.max_memory_allocated() / 1024 ** 3,
torch.cuda.max_memory_cached() / 1024 ** 3))
return running_loss / n_steps, running_accuracy / n_steps
def evaluate(model, loss, val_iterator):
"""
    Evaluating the model (no backward pass / optimization step)
Parameters:
-----------
model:
The neural network model.
loss:
Loss function used for optimization (e.g. cross entropy, MSE, ...).
val_iterator:
PyTorch Dataloader for dataset which should be evaluated.
Returns:
--------
Validation loss.
Validation accuracy.
"""
# Initializing parameters
loss_value = 0.0
accuracy = 0.0
total_samples = 0
with torch.no_grad():
# Iterating over validation dataloader
for data, labels in val_iterator:
# Resetting variables for calculating current batch accuracy
correct = 0
total = 0
# Map data to GPU if available
if use_cuda:
data = data.cuda()
labels = labels.cuda(non_blocking=True)
n_batch_samples = labels.size()[0]
logits = model(data)
# Compute batch loss
batch_loss = loss(logits, labels)
# Compute batch accuracy
_, predicted = logits.max(1)
total += labels.size(0)
correct += predicted.eq(labels).sum().item()
batch_accuracy = 100. * correct / total
# Summing up batch losses and accuracies over each step
loss_value += batch_loss.float() * n_batch_samples
accuracy += batch_accuracy * n_batch_samples
total_samples += n_batch_samples
return loss_value / total_samples, accuracy / total_samples
def grid_search(train_loader, val_loader, criterion, alpha, beta):
"""
    Builds the model with the given scaling factors, sets up the optimizer and learning rate scheduler, and runs
    training and evaluation. A checkpoint is saved every epoch, and the best model is stored separately.
Parameters:
-----------
train_loader:
PyTorch Dataloader for given train dataset.
val_loader:
PyTorch Dataloader for given validation dataset.
criterion:
Loss function to use (e.g. cross entropy, MSE, ...).
alpha:
Scaling factor for model depth.
beta:
Scaling factor for model width.
"""
# Initializing training variables
best_acc = 0
all_losses = []
# Initializing log file
logfile = open('./model_compound_scaling/logfiles/logfile.txt', 'a+')
logfile.write('depth multiplier: {}, width multiplier: {}\n'.format(alpha, beta))
# Building the model
if args.dataset == 'CIFAR100' or args.dataset == 'CIFAR10':
model = micronet(d_multiplier=alpha, w_multiplier=beta)
elif args.dataset == 'ImageNet':
model = image_micronet(d_multiplier=alpha, w_multiplier=beta)
# If multipile GPUs are used
if use_cuda and torch.cuda.device_count() > 1:
print("Let's use", torch.cuda.device_count(), "GPUs!")
model = nn.DataParallel(model)
# Transfers model to device (GPU/CPU). Device is globally initialized.
model.to(device)
# Defining the optimizer
optimizer = torch.optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum,
weight_decay=args.weight_decay, nesterov=True)
    # Keras-like summary of the model architecture
# summary(your_model, input_size=(channels, H, W), batch_size=-1, device="cuda")
if use_cuda:
if args.dataset == 'CIFAR100' or args.dataset == 'CIFAR10':
summary(model, (3, 32, 32), batch_size=args.batch_size)
print(model)
elif args.dataset == 'ImageNet':
summary(model, (3, args.image_size, args.image_size), batch_size=args.batch_size)
print(model)
# Optionally resume from a checkpoint
if args.resume:
if os.path.isfile(args.resume):
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume)
args.start_epoch = checkpoint['epoch']
best_acc = checkpoint['acc']
model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
load_last_epoch = checkpoint['epoch']-1
print("=> loaded checkpoint '{}' (epoch {})".format(args.resume, checkpoint['epoch']))
else:
print("=> no checkpoint found at '{}'".format(args.resume))
load_last_epoch = -1
# Learning rate schedulers for cifar_micronet and imagenet_micronet
    if args.dataset == 'CIFAR100' or args.dataset == 'CIFAR10':
lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer,
T_max = args.epochs,
eta_min = 0,
last_epoch = load_last_epoch)
elif args.dataset == 'ImageNet':
lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer,
milestones=[30, 60, 90],
gamma=0.1,
last_epoch = load_last_epoch)
# START TRAINING
start_time = time.time()
model.train()
for epoch in range(args.start_epoch, args.epochs):
print('current lr {:.5e}'.format(optimizer.param_groups[0]['lr']))
# Executing training process
running_loss, running_accuracy = train(train_loader, model, criterion, optimizer, epoch)
# Evaluation
model.eval()
val_loss, val_accuracy = evaluate(model, criterion, val_loader)
# Logging the accuracies
all_losses += [(epoch, running_loss, val_loss, running_accuracy, val_accuracy)]
        print('Epoch {0} running loss {1:.3f} val loss {2:.3f} running acc {3:.3f} '
              'val acc {4:.3f} time {5:.3f}'.format(*all_losses[-1], time.time() - start_time))
        logfile.write('Epoch {0} running loss {1:.3f} val loss {2:.3f} running acc {3:.3f} '
                      'val acc {4:.3f} time {5:.3f}\n'.format(*all_losses[-1], time.time() - start_time))
# Saving checkpoint
torch.save({
'epoch': epoch,
'state_dict': model.state_dict(),
'optimizer_state_dict': optimizer.state_dict(),
'acc': val_accuracy,
'lr': optimizer.param_groups[0]['lr']
}, args.resume)
# Make a lr scheduler step
lr_scheduler.step()
# Checking if current epoch yielded best validation accuracy
is_best = val_accuracy > best_acc
best_acc = max(val_accuracy, best_acc)
# If so, saving best model state_dict
if is_best and epoch > 0:
torch.save(model.state_dict(), './model_compound_scaling/saved_models/best_model.pt')
# Switch back to train mode
model.train()
start_time = time.time()
class Cutout(object):
"""
Randomly mask out one or more patches from an image.
Parameters:
-----------
n_holes (int):
Number of patches to cut out of each image.
length (int):
The length (in pixels) of each square patch.
"""
def __init__(self, n_holes, length):
self.n_holes = n_holes
self.length = length
def __call__(self, img):
"""
Parameters:
-----------
img (Tensor):
Tensor image of size (C, H, W).
Returns:
--------
Tensor:
Image with n_holes of dimension length x length cut out of it.
"""
h = img.size(1)
w = img.size(2)
mask = np.ones((h, w), np.float32)
for n in range(self.n_holes):
y = np.random.randint(h)
x = np.random.randint(w)
y1 = np.clip(y - self.length // 2, 0, h)
y2 = np.clip(y + self.length // 2, 0, h)
x1 = np.clip(x - self.length // 2, 0, w)
x2 = np.clip(x + self.length // 2, 0, w)
mask[y1: y2, x1: x2] = 0.
mask = torch.from_numpy(mask)
mask = mask.expand_as(img)
img = img * mask
return img
def main():
"""
--------------------------------------------- MAIN --------------------------------------------------------
Loads the data and executes the grid search on depth and width scaling factors.
"""
# Manual seed for reproducibility
torch.manual_seed(363636)
# Global instances
global args, use_cuda, device
# Instantiating the parser
args = parser.parse_args()
# Global CUDA flag
use_cuda = args.cuda and torch.cuda.is_available()
    # Defining the device and its map location
device = torch.device("cuda" if use_cuda else "cpu")
print('chosen device: ', device)
# Defining loss function and printing CUDA information (if available)
if use_cuda:
print("PyTorch version: ")
print(torch.__version__)
print("CUDA Version: ")
print(torch.version.cuda)
print("cuDNN version is: ")
print(cudnn.version())
cudnn.benchmark = True
criterion = nn.CrossEntropyLoss().cuda()
else:
criterion = nn.CrossEntropyLoss()
# Dataloaders for CIFAR, ImageNet and MNIST
if args.dataset == 'CIFAR100':
normalize = transforms.Normalize(mean=[x / 255.0 for x in [125.3, 123.0, 113.9]],
std=[x / 255.0 for x in [63.0, 62.1, 66.7]])
kwargs = {'num_workers': args.workers, 'pin_memory': True} if use_cuda else {}
train_loader = torch.utils.data.DataLoader(
datasets.CIFAR100(root=args.data_path, train=True, transform=transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.RandomCrop(32, 4),
transforms.ColorJitter(brightness=0.3, contrast=0.3, saturation=0.3, hue=0.075),
transforms.ToTensor(),
normalize,
Cutout(n_holes=1, length=16),
]), download=True),
batch_size=args.batch_size, shuffle=True, **kwargs)
val_loader = torch.utils.data.DataLoader(
datasets.CIFAR100(root=args.data_path, train=False, transform=transforms.Compose([
transforms.ToTensor(),
normalize,
])),
batch_size=args.val_batch_size, shuffle=False, **kwargs)
elif args.dataset == 'ImageNet':
traindir = os.path.join(args.data_path, 'train')
valdir = os.path.join(args.data_path, 'val')
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_dataset = datasets.ImageFolder(
traindir,
transforms.Compose([
transforms.RandomResizedCrop(args.image_size),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
]))
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=args.batch_size, shuffle=True,
num_workers=args.workers, pin_memory=True)
image_size = args.image_size
val_dataset = datasets.ImageFolder(
valdir,
transforms.Compose([
transforms.Resize(image_size, interpolation=PIL.Image.BICUBIC),
transforms.CenterCrop(image_size),
transforms.ToTensor(),
normalize,
]))
val_loader = torch.utils.data.DataLoader(
val_dataset, batch_size=args.val_batch_size, shuffle=False,
num_workers=args.workers, pin_memory=True)
elif args.dataset == 'MNIST':
kwargs = {'num_workers': args.workers, 'pin_memory': True} if use_cuda else {}
train_loader = torch.utils.data.DataLoader(
datasets.MNIST(args.data_path, train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.batch_size, shuffle=True, **kwargs)
val_loader = torch.utils.data.DataLoader(
datasets.MNIST(args.data_path, train=False, transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.val_batch_size, shuffle=True, **kwargs)
elif args.dataset == 'CIFAR10':
normalize = transforms.Normalize(mean=[x / 255.0 for x in [125.3, 123.0, 113.9]],
std=[x / 255.0 for x in [63.0, 62.1, 66.7]])
kwargs = {'num_workers': args.workers, 'pin_memory': True} if use_cuda else {}
train_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(root=args.data_path, train=True, transform=transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.RandomCrop(32, 4),
transforms.ToTensor(),
normalize,
]), download=True),
batch_size=args.batch_size, shuffle=True, **kwargs)
val_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(root=args.data_path, train=False, transform=transforms.Compose([
transforms.ToTensor(),
normalize,
])),
batch_size=args.val_batch_size, shuffle=False, **kwargs)
# original grid = [(1.0, 1.0), (1.9, 1.0), (1.7, 1.1), (1.6, 1.1), (1.4, 1.2), (1.2, 1.3), (1.0, 1.4)]
grid = [(args.grid[i], args.grid[i+1]) for i in range(0, len(args.grid), 2)]
for coeff in grid:
alpha = coeff[0] ** args.phi
beta = coeff[1] ** args.phi
grid_search(train_loader, val_loader, criterion, alpha, beta)
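    # Worked example (illustrative): with the default --grid "1.4 1.2" and
    # --phi 3.5, the loop above runs a single configuration with
    # depth multiplier alpha = 1.4 ** 3.5, about 3.25, and
    # width multiplier beta = 1.2 ** 3.5, about 1.89.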
if __name__ == '__main__':
t1_start = time.perf_counter()
main()
t1_stop = time.perf_counter()
print("Elapsed time: %.2f [s]" % (t1_stop - t1_start))
| 38.836996
| 118
| 0.590946
|
3ccc8e234d64c6afbb716972d56c7fa3d5383223
| 65,019
|
py
|
Python
|
test/functional/feature_block.py
|
criptolot/bsvcoin
|
125fc951c1bb5a87b706c5a3821a1e3252f45a3d
|
[
"MIT"
] | null | null | null |
test/functional/feature_block.py
|
criptolot/bsvcoin
|
125fc951c1bb5a87b706c5a3821a1e3252f45a3d
|
[
"MIT"
] | null | null | null |
test/functional/feature_block.py
|
criptolot/bsvcoin
|
125fc951c1bb5a87b706c5a3821a1e3252f45a3d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test block processing."""
import copy
import struct
import time
from test_framework.blocktools import (
create_block,
create_coinbase,
create_tx_with_script,
get_legacy_sigopcount_block,
MAX_BLOCK_SIGOPS,
)
from test_framework.key import ECKey
from test_framework.messages import (
CBlock,
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxOut,
MAX_BLOCK_BASE_SIZE,
uint256_from_compact,
uint256_from_str,
)
from test_framework.p2p import P2PDataStore
from test_framework.script import (
CScript,
MAX_SCRIPT_ELEMENT_SIZE,
OP_2DUP,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_CHECKSIG,
OP_CHECKSIGVERIFY,
OP_ELSE,
OP_ENDIF,
OP_EQUAL,
OP_DROP,
OP_FALSE,
OP_HASH160,
OP_IF,
OP_INVALIDOPCODE,
OP_RETURN,
OP_TRUE,
SIGHASH_ALL,
LegacySignatureHash,
hash160,
)
from test_framework.test_framework import BsvcoinTestFramework
from test_framework.util import assert_equal
from data import invalid_txs
# Use this class for tests that require behavior other than normal p2p behavior.
# For now, it is used to serialize a bloated varint (b64).
class CBrokenBlock(CBlock):
def initialize(self, base_block):
self.vtx = copy.deepcopy(base_block.vtx)
self.hashMerkleRoot = self.calc_merkle_root()
def serialize(self, with_witness=False):
r = b""
r += super(CBlock, self).serialize()
r += struct.pack("<BQ", 255, len(self.vtx))
for tx in self.vtx:
if with_witness:
r += tx.serialize_with_witness()
else:
r += tx.serialize_without_witness()
return r
def normal_serialize(self):
return super().serialize()
DUPLICATE_COINBASE_SCRIPT_SIG = b'\x01\x78' # Valid for block at height 120
class FullBlockTest(BsvcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.extra_args = [['-acceptnonstdtxn=1']] # This is a consensus block test, we don't care about tx policy
def run_test(self):
node = self.nodes[0] # convenience reference to the node
self.bootstrap_p2p() # Add one p2p connection to the node
self.block_heights = {}
self.coinbase_key = ECKey()
self.coinbase_key.generate()
self.coinbase_pubkey = self.coinbase_key.get_pubkey().get_bytes()
self.tip = None
self.blocks = {}
self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
self.block_heights[self.genesis_hash] = 0
self.spendable_outputs = []
# Create a new block
b_dup_cb = self.next_block('dup_cb')
b_dup_cb.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG
b_dup_cb.vtx[0].rehash()
duplicate_tx = b_dup_cb.vtx[0]
b_dup_cb = self.update_block('dup_cb', [])
self.send_blocks([b_dup_cb])
b0 = self.next_block(0)
self.save_spendable_output()
self.send_blocks([b0])
# These constants chosen specifically to trigger an immature coinbase spend
# at a certain time below.
NUM_BUFFER_BLOCKS_TO_GENERATE = 99
NUM_OUTPUTS_TO_COLLECT = 33
# Allow the block to mature
blocks = []
for i in range(NUM_BUFFER_BLOCKS_TO_GENERATE):
blocks.append(self.next_block(f"maturitybuffer.{i}"))
self.save_spendable_output()
self.send_blocks(blocks)
# collect spendable outputs now to avoid cluttering the code later on
out = []
for _ in range(NUM_OUTPUTS_TO_COLLECT):
out.append(self.get_spendable_output())
# Start by building a couple of blocks on top (which output is spent is
# in parentheses):
# genesis -> b1 (0) -> b2 (1)
b1 = self.next_block(1, spend=out[0])
self.save_spendable_output()
b2 = self.next_block(2, spend=out[1])
self.save_spendable_output()
self.send_blocks([b1, b2], timeout=4)
# Select a txn with an output eligible for spending. This won't actually be spent,
# since we're testing submission of a series of blocks with invalid txns.
attempt_spend_tx = out[2]
# Submit blocks for rejection, each of which contains a single transaction
# (aside from coinbase) which should be considered invalid.
for TxTemplate in invalid_txs.iter_all_templates():
template = TxTemplate(spend_tx=attempt_spend_tx)
if template.valid_in_block:
continue
self.log.info(f"Reject block with invalid tx: {TxTemplate.__name__}")
blockname = f"for_invalid.{TxTemplate.__name__}"
badblock = self.next_block(blockname)
badtx = template.get_tx()
if TxTemplate != invalid_txs.InputMissing:
self.sign_tx(badtx, attempt_spend_tx)
badtx.rehash()
badblock = self.update_block(blockname, [badtx])
self.send_blocks(
[badblock], success=False,
reject_reason=(template.block_reject_reason or template.reject_reason),
reconnect=True, timeout=2)
self.move_tip(2)
# Fork like this:
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1)
#
# Nothing should happen at this point. We saw b2 first so it takes priority.
self.log.info("Don't reorg to a chain of the same length")
self.move_tip(1)
b3 = self.next_block(3, spend=out[1])
txout_b3 = b3.vtx[1]
self.send_blocks([b3], False)
# Now we add another block to make the alternative chain longer.
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1) -> b4 (2)
self.log.info("Reorg to a longer chain")
b4 = self.next_block(4, spend=out[2])
self.send_blocks([b4])
# ... and back to the first chain.
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b3 (1) -> b4 (2)
self.move_tip(2)
b5 = self.next_block(5, spend=out[2])
self.save_spendable_output()
self.send_blocks([b5], False)
self.log.info("Reorg back to the original chain")
b6 = self.next_block(6, spend=out[3])
self.send_blocks([b6], True)
# Try to create a fork that double-spends
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b7 (2) -> b8 (4)
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a chain with a double spend, even if it is longer")
self.move_tip(5)
b7 = self.next_block(7, spend=out[2])
self.send_blocks([b7], False)
b8 = self.next_block(8, spend=out[4])
self.send_blocks([b8], False, reconnect=True)
# Try to create a block that has too much fee
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b9 (4)
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a block where the miner creates too much coinbase reward")
self.move_tip(6)
b9 = self.next_block(9, spend=out[4], additional_coinbase_value=1)
self.send_blocks([b9], success=False, reject_reason='bad-cb-amount', reconnect=True)
# Create a fork that ends in a block with too much fee (the one that causes the reorg)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b10 (3) -> b11 (4)
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a chain where the miner creates too much coinbase reward, even if the chain is longer")
self.move_tip(5)
b10 = self.next_block(10, spend=out[3])
self.send_blocks([b10], False)
b11 = self.next_block(11, spend=out[4], additional_coinbase_value=1)
self.send_blocks([b11], success=False, reject_reason='bad-cb-amount', reconnect=True)
# Try again, but with a valid fork first
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b14 (5)
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a chain where the miner creates too much coinbase reward, even if the chain is longer (on a forked chain)")
self.move_tip(5)
b12 = self.next_block(12, spend=out[3])
self.save_spendable_output()
b13 = self.next_block(13, spend=out[4])
self.save_spendable_output()
b14 = self.next_block(14, spend=out[5], additional_coinbase_value=1)
self.send_blocks([b12, b13, b14], success=False, reject_reason='bad-cb-amount', reconnect=True)
# New tip should be b13.
assert_equal(node.getbestblockhash(), b13.hash)
# Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
# \-> b3 (1) -> b4 (2)
self.log.info("Accept a block with lots of checksigs")
lots_of_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS - 1))
self.move_tip(13)
b15 = self.next_block(15, spend=out[5], script=lots_of_checksigs)
self.save_spendable_output()
self.send_blocks([b15], True)
self.log.info("Reject a block with too many checksigs")
too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
b16 = self.next_block(16, spend=out[6], script=too_many_checksigs)
self.send_blocks([b16], success=False, reject_reason='bad-blk-sigops', reconnect=True)
# Attempt to spend a transaction created on a different fork
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a block with a spend from a re-org'ed out tx")
self.move_tip(15)
b17 = self.next_block(17, spend=txout_b3)
self.send_blocks([b17], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
# Attempt to spend a transaction created on a different fork (on a fork this time)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5)
# \-> b18 (b3.vtx[1]) -> b19 (6)
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a block with a spend from a re-org'ed out tx (on a forked chain)")
self.move_tip(13)
b18 = self.next_block(18, spend=txout_b3)
self.send_blocks([b18], False)
b19 = self.next_block(19, spend=out[6])
self.send_blocks([b19], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
# Attempt to spend a coinbase at depth too low
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a block spending an immature coinbase.")
self.move_tip(15)
b20 = self.next_block(20, spend=out[7])
self.send_blocks([b20], success=False, reject_reason='bad-txns-premature-spend-of-coinbase', reconnect=True)
# Attempt to spend a coinbase at depth too low (on a fork this time)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5)
# \-> b21 (6) -> b22 (5)
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a block spending an immature coinbase (on a forked chain)")
self.move_tip(13)
b21 = self.next_block(21, spend=out[6])
self.send_blocks([b21], False)
b22 = self.next_block(22, spend=out[5])
self.send_blocks([b22], success=False, reject_reason='bad-txns-premature-spend-of-coinbase', reconnect=True)
        # Create a block on either side of MAX_BLOCK_BASE_SIZE and make sure it's accepted/rejected
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
# \-> b24 (6) -> b25 (7)
# \-> b3 (1) -> b4 (2)
self.log.info("Accept a block of size MAX_BLOCK_BASE_SIZE")
self.move_tip(15)
b23 = self.next_block(23, spend=out[6])
tx = CTransaction()
script_length = MAX_BLOCK_BASE_SIZE - len(b23.serialize()) - 69
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0)))
b23 = self.update_block(23, [tx])
# Make sure the math above worked out to produce a max-sized block
assert_equal(len(b23.serialize()), MAX_BLOCK_BASE_SIZE)
self.send_blocks([b23], True)
self.save_spendable_output()
self.log.info("Reject a block of size MAX_BLOCK_BASE_SIZE + 1")
self.move_tip(15)
b24 = self.next_block(24, spend=out[6])
script_length = MAX_BLOCK_BASE_SIZE - len(b24.serialize()) - 69
script_output = CScript([b'\x00' * (script_length + 1)])
tx.vout = [CTxOut(0, script_output)]
b24 = self.update_block(24, [tx])
assert_equal(len(b24.serialize()), MAX_BLOCK_BASE_SIZE + 1)
self.send_blocks([b24], success=False, reject_reason='bad-blk-length', reconnect=True)
b25 = self.next_block(25, spend=out[7])
self.send_blocks([b25], False)
# Create blocks with a coinbase input script size out of range
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
# \-> ... (6) -> ... (7)
# \-> b3 (1) -> b4 (2)
self.log.info("Reject a block with coinbase input script size out of range")
self.move_tip(15)
b26 = self.next_block(26, spend=out[6])
b26.vtx[0].vin[0].scriptSig = b'\x00'
b26.vtx[0].rehash()
# update_block causes the merkle root to get updated, even with no new
# transactions, and updates the required state.
b26 = self.update_block(26, [])
self.send_blocks([b26], success=False, reject_reason='bad-cb-length', reconnect=True)
# Extend the b26 chain to make sure bsvcoind isn't accepting b26
b27 = self.next_block(27, spend=out[7])
self.send_blocks([b27], False)
# Now try a too-large-coinbase script
self.move_tip(15)
b28 = self.next_block(28, spend=out[6])
b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
b28.vtx[0].rehash()
b28 = self.update_block(28, [])
self.send_blocks([b28], success=False, reject_reason='bad-cb-length', reconnect=True)
# Extend the b28 chain to make sure bsvcoind isn't accepting b28
b29 = self.next_block(29, spend=out[7])
self.send_blocks([b29], False)
# b30 has a max-sized coinbase scriptSig.
self.move_tip(23)
b30 = self.next_block(30)
b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
b30.vtx[0].rehash()
b30 = self.update_block(30, [])
self.send_blocks([b30], True)
self.save_spendable_output()
# b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY
#
# genesis -> ... -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10)
# \-> b36 (11)
# \-> b34 (10)
# \-> b32 (9)
#
# MULTISIG: each op code counts as 20 sigops. To create the edge case, pack another 19 sigops at the end.
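        # Worked arithmetic for the edge case (assuming MAX_BLOCK_SIGOPS = 20,000, as the
        # byte counts later in this test imply): (20,000 - 1) // 20 = 999 multisig opcodes
        # count as 19,980 sigops, the 19 trailing OP_CHECKSIGs bring the script to 19,999,
        # and the coinbase's single sigop makes the block total exactly 20,000.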
self.log.info("Accept a block with the max number of OP_CHECKMULTISIG sigops")
lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS - 1) // 20) + [OP_CHECKSIG] * 19)
b31 = self.next_block(31, spend=out[8], script=lots_of_multisigs)
assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS)
self.send_blocks([b31], True)
self.save_spendable_output()
# this goes over the limit because the coinbase has one sigop
self.log.info("Reject a block with too many OP_CHECKMULTISIG sigops")
too_many_multisigs = CScript([OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS // 20))
b32 = self.next_block(32, spend=out[9], script=too_many_multisigs)
assert_equal(get_legacy_sigopcount_block(b32), MAX_BLOCK_SIGOPS + 1)
self.send_blocks([b32], success=False, reject_reason='bad-blk-sigops', reconnect=True)
# CHECKMULTISIGVERIFY
self.log.info("Accept a block with the max number of OP_CHECKMULTISIGVERIFY sigops")
self.move_tip(31)
lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS - 1) // 20) + [OP_CHECKSIG] * 19)
b33 = self.next_block(33, spend=out[9], script=lots_of_multisigs)
self.send_blocks([b33], True)
self.save_spendable_output()
self.log.info("Reject a block with too many OP_CHECKMULTISIGVERIFY sigops")
too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS // 20))
b34 = self.next_block(34, spend=out[10], script=too_many_multisigs)
self.send_blocks([b34], success=False, reject_reason='bad-blk-sigops', reconnect=True)
# CHECKSIGVERIFY
self.log.info("Accept a block with the max number of OP_CHECKSIGVERIFY sigops")
self.move_tip(33)
lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS - 1))
b35 = self.next_block(35, spend=out[10], script=lots_of_checksigs)
self.send_blocks([b35], True)
self.save_spendable_output()
self.log.info("Reject a block with too many OP_CHECKSIGVERIFY sigops")
too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS))
b36 = self.next_block(36, spend=out[11], script=too_many_checksigs)
self.send_blocks([b36], success=False, reject_reason='bad-blk-sigops', reconnect=True)
# Check spending of a transaction in a block which failed to connect
#
# b6 (3)
# b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10)
# \-> b37 (11)
# \-> b38 (11/37)
#
# save 37's spendable output, but then double-spend out11 to invalidate the block
self.log.info("Reject a block spending transaction from a block which failed to connect")
self.move_tip(35)
b37 = self.next_block(37, spend=out[11])
txout_b37 = b37.vtx[1]
tx = self.create_and_sign_transaction(out[11], 0)
b37 = self.update_block(37, [tx])
self.send_blocks([b37], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
# attempt to spend b37's first non-coinbase tx, at which point b37 was still considered valid
self.move_tip(35)
b38 = self.next_block(38, spend=txout_b37)
self.send_blocks([b38], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
# Check P2SH SigOp counting
#
#
# 13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12)
# \-> b40 (12)
#
# b39 - create some P2SH outputs that will require 6 sigops to spend:
#
# redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG
# p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL
#
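        # The 6 sigops per output come from the five OP_CHECKSIGVERIFYs plus the final
        # OP_CHECKSIG in the redeem script; that is where b39_sigops_per_output = 6 below
        # comes from.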
self.log.info("Check P2SH SIGOPS are correctly counted")
self.move_tip(35)
b39 = self.next_block(39)
b39_outputs = 0
b39_sigops_per_output = 6
# Build the redeem script, hash it, use hash to create the p2sh script
redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG])
redeem_script_hash = hash160(redeem_script)
p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])
# Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE
# This must be signed because it is spending a coinbase
spend = out[11]
tx = self.create_tx(spend, 0, 1, p2sh_script)
tx.vout.append(CTxOut(spend.vout[0].nValue - 1, CScript([OP_TRUE])))
self.sign_tx(tx, spend)
tx.rehash()
b39 = self.update_block(39, [tx])
b39_outputs += 1
# Until block is full, add tx's with 1 satoshi to p2sh_script, the rest to OP_TRUE
tx_new = None
tx_last = tx
total_size = len(b39.serialize())
        while total_size < MAX_BLOCK_BASE_SIZE:
tx_new = self.create_tx(tx_last, 1, 1, p2sh_script)
tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE])))
tx_new.rehash()
total_size += len(tx_new.serialize())
if total_size >= MAX_BLOCK_BASE_SIZE:
break
b39.vtx.append(tx_new) # add tx to block
tx_last = tx_new
b39_outputs += 1
# The accounting in the loop above can be off, because it misses the
# compact size encoding of the number of transactions in the block.
# Make sure we didn't accidentally make too big a block. Note that the
# size of the block has non-determinism due to the ECDSA signature in
# the first transaction.
while (len(b39.serialize()) >= MAX_BLOCK_BASE_SIZE):
del b39.vtx[-1]
b39 = self.update_block(39, [])
self.send_blocks([b39], True)
self.save_spendable_output()
# Test sigops in P2SH redeem scripts
#
# b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops.
# The first tx has one sigop and then at the end we add 2 more to put us just over the max.
#
# b41 does the same, less one, so it has the maximum sigops permitted.
#
self.log.info("Reject a block with too many P2SH sigops")
self.move_tip(39)
b40 = self.next_block(40, spend=out[12])
sigops = get_legacy_sigopcount_block(b40)
numTxes = (MAX_BLOCK_SIGOPS - sigops) // b39_sigops_per_output
assert_equal(numTxes <= b39_outputs, True)
lastOutpoint = COutPoint(b40.vtx[1].sha256, 0)
new_txs = []
for i in range(1, numTxes + 1):
tx = CTransaction()
tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
tx.vin.append(CTxIn(lastOutpoint, b''))
# second input is corresponding P2SH output from b39
tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b''))
# Note: must pass the redeem_script (not p2sh_script) to the signature hash function
(sighash, err) = LegacySignatureHash(redeem_script, tx, 1, SIGHASH_ALL)
sig = self.coinbase_key.sign_ecdsa(sighash) + bytes(bytearray([SIGHASH_ALL]))
scriptSig = CScript([sig, redeem_script])
tx.vin[1].scriptSig = scriptSig
tx.rehash()
new_txs.append(tx)
lastOutpoint = COutPoint(tx.sha256, 0)
b40_sigops_to_fill = MAX_BLOCK_SIGOPS - (numTxes * b39_sigops_per_output + sigops) + 1
tx = CTransaction()
tx.vin.append(CTxIn(lastOutpoint, b''))
tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill)))
tx.rehash()
new_txs.append(tx)
self.update_block(40, new_txs)
self.send_blocks([b40], success=False, reject_reason='bad-blk-sigops', reconnect=True)
# same as b40, but one less sigop
self.log.info("Accept a block with the max number of P2SH sigops")
self.move_tip(39)
b41 = self.next_block(41, spend=None)
self.update_block(41, b40.vtx[1:-1])
b41_sigops_to_fill = b40_sigops_to_fill - 1
tx = CTransaction()
tx.vin.append(CTxIn(lastOutpoint, b''))
tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill)))
tx.rehash()
self.update_block(41, [tx])
self.send_blocks([b41], True)
# Fork off of b39 to create a constant base again
#
# b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13)
# \-> b41 (12)
#
self.move_tip(39)
b42 = self.next_block(42, spend=out[12])
self.save_spendable_output()
b43 = self.next_block(43, spend=out[13])
self.save_spendable_output()
self.send_blocks([b42, b43], True)
# Test a number of really invalid scenarios
#
# -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b44 (14)
# \-> ??? (15)
# The next few blocks are going to be created "by hand" since they'll do funky things, such as having
# the first transaction be non-coinbase, etc. The purpose of b44 is to make sure this works.
self.log.info("Build block 44 manually")
height = self.block_heights[self.tip.sha256] + 1
coinbase = create_coinbase(height, self.coinbase_pubkey)
b44 = CBlock()
b44.nTime = self.tip.nTime + 1
b44.hashPrevBlock = self.tip.sha256
b44.nBits = 0x207fffff
b44.vtx.append(coinbase)
b44.hashMerkleRoot = b44.calc_merkle_root()
b44.solve()
self.tip = b44
self.block_heights[b44.sha256] = height
self.blocks[44] = b44
self.send_blocks([b44], True)
self.log.info("Reject a block with a non-coinbase as the first tx")
non_coinbase = self.create_tx(out[15], 0, 1)
b45 = CBlock()
b45.nTime = self.tip.nTime + 1
b45.hashPrevBlock = self.tip.sha256
b45.nBits = 0x207fffff
b45.vtx.append(non_coinbase)
b45.hashMerkleRoot = b45.calc_merkle_root()
b45.calc_sha256()
b45.solve()
self.block_heights[b45.sha256] = self.block_heights[self.tip.sha256] + 1
self.tip = b45
self.blocks[45] = b45
self.send_blocks([b45], success=False, reject_reason='bad-cb-missing', reconnect=True)
self.log.info("Reject a block with no transactions")
self.move_tip(44)
b46 = CBlock()
b46.nTime = b44.nTime + 1
b46.hashPrevBlock = b44.sha256
b46.nBits = 0x207fffff
b46.vtx = []
b46.hashMerkleRoot = 0
b46.solve()
self.block_heights[b46.sha256] = self.block_heights[b44.sha256] + 1
self.tip = b46
assert 46 not in self.blocks
self.blocks[46] = b46
self.send_blocks([b46], success=False, reject_reason='bad-blk-length', reconnect=True)
self.log.info("Reject a block with invalid work")
self.move_tip(44)
b47 = self.next_block(47)
target = uint256_from_compact(b47.nBits)
while b47.sha256 <= target:
# Rehash nonces until an invalid too-high-hash block is found.
b47.nNonce += 1
b47.rehash()
self.send_blocks([b47], False, force_send=True, reject_reason='high-hash', reconnect=True)
self.log.info("Reject a block with a timestamp >2 hours in the future")
self.move_tip(44)
b48 = self.next_block(48)
b48.nTime = int(time.time()) + 60 * 60 * 3
# Header timestamp has changed. Re-solve the block.
b48.solve()
self.send_blocks([b48], False, force_send=True, reject_reason='time-too-new')
self.log.info("Reject a block with invalid merkle hash")
self.move_tip(44)
b49 = self.next_block(49)
b49.hashMerkleRoot += 1
b49.solve()
self.send_blocks([b49], success=False, reject_reason='bad-txnmrklroot', reconnect=True)
self.log.info("Reject a block with incorrect POW limit")
self.move_tip(44)
b50 = self.next_block(50)
b50.nBits = b50.nBits - 1
b50.solve()
self.send_blocks([b50], False, force_send=True, reject_reason='bad-diffbits', reconnect=True)
self.log.info("Reject a block with two coinbase transactions")
self.move_tip(44)
b51 = self.next_block(51)
cb2 = create_coinbase(51, self.coinbase_pubkey)
b51 = self.update_block(51, [cb2])
self.send_blocks([b51], success=False, reject_reason='bad-cb-multiple', reconnect=True)
self.log.info("Reject a block with duplicate transactions")
# Note: txns have to be in the right position in the merkle tree to trigger this error
self.move_tip(44)
b52 = self.next_block(52, spend=out[15])
tx = self.create_tx(b52.vtx[1], 0, 1)
b52 = self.update_block(52, [tx, tx])
self.send_blocks([b52], success=False, reject_reason='bad-txns-duplicate', reconnect=True)
# Test block timestamps
# -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15)
# \-> b54 (15)
#
self.move_tip(43)
b53 = self.next_block(53, spend=out[14])
self.send_blocks([b53], False)
self.save_spendable_output()
self.log.info("Reject a block with timestamp before MedianTimePast")
b54 = self.next_block(54, spend=out[15])
b54.nTime = b35.nTime - 1
b54.solve()
self.send_blocks([b54], False, force_send=True, reject_reason='time-too-old', reconnect=True)
# valid timestamp
self.move_tip(53)
b55 = self.next_block(55, spend=out[15])
b55.nTime = b35.nTime
self.update_block(55, [])
self.send_blocks([b55], True)
self.save_spendable_output()
# Test Merkle tree malleability
#
# -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16)
# \-> b57 (16)
# \-> b56p2 (16)
# \-> b56 (16)
#
# Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without
# affecting the merkle root of a block, while still invalidating it.
# See: src/consensus/merkle.h
#
# b57 has three txns: coinbase, tx, tx1. The merkle root computation will duplicate tx.
# Result: OK
#
# b56 copies b57 but duplicates tx1 and does not recalculate the block hash. So it has a valid merkle
# root but duplicate transactions.
# Result: Fails
#
# b57p2 has six transactions in its merkle tree:
# - coinbase, tx, tx1, tx2, tx3, tx4
# Merkle root calculation will duplicate as necessary.
# Result: OK.
#
# b56p2 copies b57p2 but adds both tx3 and tx4. The purpose of the test is to make sure the code catches
# duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates
# that the error was caught early, avoiding a DOS vulnerability.)
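        # Illustrative aside (not part of the original test flow): a minimal, self-contained
        # sketch of why duplicating the trailing tx leaves the merkle root unchanged. It
        # hashes toy byte strings with double-SHA256; the real code hashes serialized
        # transactions, but the pairing rule is the same: an odd-length level duplicates
        # its last hash before pairing.
        import hashlib
        def _dsha256(data):
            return hashlib.sha256(hashlib.sha256(data).digest()).digest()
        def _toy_merkle_root(leaves):
            level = [_dsha256(leaf) for leaf in leaves]
            while len(level) > 1:
                if len(level) % 2:
                    level.append(level[-1])  # odd count: duplicate the last hash
                level = [_dsha256(level[i] + level[i + 1]) for i in range(0, len(level), 2)]
            return level[0]
        # [cb, tx, tx1] and [cb, tx, tx1, tx1] hash to the same root (CVE-2012-2459)
        assert _toy_merkle_root([b'cb', b'tx', b'tx1']) == _toy_merkle_root([b'cb', b'tx', b'tx1', b'tx1'])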
# b57 - a good block with 2 txs, don't submit until end
self.move_tip(55)
b57 = self.next_block(57)
tx = self.create_and_sign_transaction(out[16], 1)
tx1 = self.create_tx(tx, 0, 1)
b57 = self.update_block(57, [tx, tx1])
# b56 - copy b57, add a duplicate tx
self.log.info("Reject a block with a duplicate transaction in the Merkle Tree (but with a valid Merkle Root)")
self.move_tip(55)
b56 = copy.deepcopy(b57)
self.blocks[56] = b56
assert_equal(len(b56.vtx), 3)
b56 = self.update_block(56, [tx1])
assert_equal(b56.hash, b57.hash)
self.send_blocks([b56], success=False, reject_reason='bad-txns-duplicate', reconnect=True)
# b57p2 - a good block with 6 tx'es, don't submit until end
self.move_tip(55)
b57p2 = self.next_block("57p2")
tx = self.create_and_sign_transaction(out[16], 1)
tx1 = self.create_tx(tx, 0, 1)
tx2 = self.create_tx(tx1, 0, 1)
tx3 = self.create_tx(tx2, 0, 1)
tx4 = self.create_tx(tx3, 0, 1)
b57p2 = self.update_block("57p2", [tx, tx1, tx2, tx3, tx4])
# b56p2 - copy b57p2, duplicate two non-consecutive tx's
self.log.info("Reject a block with two duplicate transactions in the Merkle Tree (but with a valid Merkle Root)")
self.move_tip(55)
b56p2 = copy.deepcopy(b57p2)
self.blocks["b56p2"] = b56p2
assert_equal(b56p2.hash, b57p2.hash)
assert_equal(len(b56p2.vtx), 6)
b56p2 = self.update_block("b56p2", [tx3, tx4])
self.send_blocks([b56p2], success=False, reject_reason='bad-txns-duplicate', reconnect=True)
self.move_tip("57p2")
self.send_blocks([b57p2], True)
self.move_tip(57)
self.send_blocks([b57], False) # The tip is not updated because 57p2 seen first
self.save_spendable_output()
# Test a few invalid tx types
#
# -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 ()
# \-> ??? (17)
#
# tx with prevout.n out of range
self.log.info("Reject a block with a transaction with prevout.n out of range")
self.move_tip(57)
b58 = self.next_block(58, spend=out[17])
tx = CTransaction()
assert len(out[17].vout) < 42
tx.vin.append(CTxIn(COutPoint(out[17].sha256, 42), CScript([OP_TRUE]), 0xffffffff))
tx.vout.append(CTxOut(0, b""))
tx.calc_sha256()
b58 = self.update_block(58, [tx])
self.send_blocks([b58], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
# tx with output value > input value
self.log.info("Reject a block with a transaction with outputs > inputs")
self.move_tip(57)
b59 = self.next_block(59)
tx = self.create_and_sign_transaction(out[17], 51 * COIN)
b59 = self.update_block(59, [tx])
self.send_blocks([b59], success=False, reject_reason='bad-txns-in-belowout', reconnect=True)
# reset to good chain
self.move_tip(57)
b60 = self.next_block(60)
self.send_blocks([b60], True)
self.save_spendable_output()
# Test BIP30 (reject duplicate)
#
# -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 ()
# \-> b61 ()
#
# Blocks are not allowed to contain a transaction whose id matches that of an earlier,
# not-fully-spent transaction in the same chain. To test, make identical coinbases;
# the second one should be rejected. See also CVE-2012-1909.
#
self.log.info("Reject a block with a transaction with a duplicate hash of a previous transaction (BIP30)")
self.move_tip(60)
b61 = self.next_block(61)
b61.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG
b61.vtx[0].rehash()
b61 = self.update_block(61, [])
assert_equal(duplicate_tx.serialize(), b61.vtx[0].serialize())
self.send_blocks([b61], success=False, reject_reason='bad-txns-BIP30', reconnect=True)
# Test BIP30 (allow duplicate if spent)
#
# -> b57 (16) -> b60 ()
# \-> b_spend_dup_cb (b_dup_cb) -> b_dup_2 ()
#
self.move_tip(57)
b_spend_dup_cb = self.next_block('spend_dup_cb')
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(duplicate_tx.sha256, 0)))
tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
self.sign_tx(tx, duplicate_tx)
tx.rehash()
b_spend_dup_cb = self.update_block('spend_dup_cb', [tx])
b_dup_2 = self.next_block('dup_2')
b_dup_2.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG
b_dup_2.vtx[0].rehash()
b_dup_2 = self.update_block('dup_2', [])
assert_equal(duplicate_tx.serialize(), b_dup_2.vtx[0].serialize())
assert_equal(self.nodes[0].gettxout(txid=duplicate_tx.hash, n=0)['confirmations'], 119)
self.send_blocks([b_spend_dup_cb, b_dup_2], success=True)
# The duplicate has less confirmations
assert_equal(self.nodes[0].gettxout(txid=duplicate_tx.hash, n=0)['confirmations'], 1)
# Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests)
#
# -> b_spend_dup_cb (b_dup_cb) -> b_dup_2 ()
# \-> b62 (18)
#
self.log.info("Reject a block with a transaction with a nonfinal locktime")
self.move_tip('dup_2')
b62 = self.next_block(62)
tx = CTransaction()
tx.nLockTime = 0xffffffff # this locktime is non-final
tx.vin.append(CTxIn(COutPoint(out[18].sha256, 0))) # don't set nSequence
tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
assert tx.vin[0].nSequence < 0xffffffff
tx.calc_sha256()
b62 = self.update_block(62, [tx])
self.send_blocks([b62], success=False, reject_reason='bad-txns-nonfinal', reconnect=True)
# Test a non-final coinbase is also rejected
#
# -> b_spend_dup_cb (b_dup_cb) -> b_dup_2 ()
# \-> b63 (-)
#
self.log.info("Reject a block with a coinbase transaction with a nonfinal locktime")
self.move_tip('dup_2')
b63 = self.next_block(63)
b63.vtx[0].nLockTime = 0xffffffff
b63.vtx[0].vin[0].nSequence = 0xDEADBEEF
b63.vtx[0].rehash()
b63 = self.update_block(63, [])
self.send_blocks([b63], success=False, reject_reason='bad-txns-nonfinal', reconnect=True)
# This checks that a block with a bloated VARINT between the block_header and the array of tx such that
# the block is > MAX_BLOCK_BASE_SIZE with the bloated varint, but <= MAX_BLOCK_BASE_SIZE without the bloated varint,
# does not cause a subsequent, identical block with canonical encoding to be rejected. The test does not
# care whether the bloated block is accepted or rejected; it only cares that the second block is accepted.
#
# What matters is that the receiving node should not reject the bloated block, and then reject the canonical
# block on the basis that it's the same as an already-rejected block (which would be a consensus failure.)
#
# -> b_spend_dup_cb (b_dup_cb) -> b_dup_2 () -> b64 (18)
# \
# b64a (18)
# b64a is a bloated block (non-canonical varint)
        # b64 is a good block (same as b64a but w/ canonical varint)
#
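        # For reference: the canonical CompactSize encoding of a small tx count is a single
        # byte; the bloated serialization presumably uses the 9-byte form (0xff prefix plus
        # an 8-byte little-endian count) instead, which is why b64a ends up exactly 8 bytes
        # over MAX_BLOCK_BASE_SIZE in the assertion below.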
self.log.info("Accept a valid block even if a bloated version of the block has previously been sent")
self.move_tip('dup_2')
regular_block = self.next_block("64a", spend=out[18])
        # make it a "broken_block", with non-canonical serialization
b64a = CBrokenBlock(regular_block)
b64a.initialize(regular_block)
self.blocks["64a"] = b64a
self.tip = b64a
tx = CTransaction()
# use canonical serialization to calculate size
script_length = MAX_BLOCK_BASE_SIZE - len(b64a.normal_serialize()) - 69
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0)))
b64a = self.update_block("64a", [tx])
assert_equal(len(b64a.serialize()), MAX_BLOCK_BASE_SIZE + 8)
self.send_blocks([b64a], success=False, reject_reason='non-canonical ReadCompactSize()')
# bsvcoind doesn't disconnect us for sending a bloated block, but if we subsequently
# resend the header message, it won't send us the getdata message again. Just
# disconnect and reconnect and then call sync_blocks.
# TODO: improve this test to be less dependent on P2P DOS behaviour.
node.disconnect_p2ps()
self.reconnect_p2p()
self.move_tip('dup_2')
b64 = CBlock(b64a)
b64.vtx = copy.deepcopy(b64a.vtx)
assert_equal(b64.hash, b64a.hash)
assert_equal(len(b64.serialize()), MAX_BLOCK_BASE_SIZE)
self.blocks[64] = b64
b64 = self.update_block(64, [])
self.send_blocks([b64], True)
self.save_spendable_output()
# Spend an output created in the block itself
#
# -> b_dup_2 () -> b64 (18) -> b65 (19)
#
self.log.info("Accept a block with a transaction spending an output created in the same block")
self.move_tip(64)
b65 = self.next_block(65)
tx1 = self.create_and_sign_transaction(out[19], out[19].vout[0].nValue)
tx2 = self.create_and_sign_transaction(tx1, 0)
b65 = self.update_block(65, [tx1, tx2])
self.send_blocks([b65], True)
self.save_spendable_output()
# Attempt to spend an output created later in the same block
#
# -> b64 (18) -> b65 (19)
# \-> b66 (20)
self.log.info("Reject a block with a transaction spending an output created later in the same block")
self.move_tip(65)
b66 = self.next_block(66)
tx1 = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue)
tx2 = self.create_and_sign_transaction(tx1, 1)
b66 = self.update_block(66, [tx2, tx1])
self.send_blocks([b66], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
# Attempt to double-spend a transaction created in a block
#
# -> b64 (18) -> b65 (19)
# \-> b67 (20)
#
#
self.log.info("Reject a block with a transaction double spending a transaction created in the same block")
self.move_tip(65)
b67 = self.next_block(67)
tx1 = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue)
tx2 = self.create_and_sign_transaction(tx1, 1)
tx3 = self.create_and_sign_transaction(tx1, 2)
b67 = self.update_block(67, [tx1, tx2, tx3])
self.send_blocks([b67], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
# More tests of block subsidy
#
# -> b64 (18) -> b65 (19) -> b69 (20)
# \-> b68 (20)
#
# b68 - coinbase with an extra 10 satoshis,
# creates a tx that has 9 satoshis from out[20] go to fees
# this fails because the coinbase is trying to claim 1 satoshi too much in fees
#
# b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee
# this succeeds
#
self.log.info("Reject a block trying to claim too much subsidy in the coinbase transaction")
self.move_tip(65)
b68 = self.next_block(68, additional_coinbase_value=10)
tx = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue - 9)
b68 = self.update_block(68, [tx])
self.send_blocks([b68], success=False, reject_reason='bad-cb-amount', reconnect=True)
self.log.info("Accept a block claiming the correct subsidy in the coinbase transaction")
self.move_tip(65)
b69 = self.next_block(69, additional_coinbase_value=10)
tx = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue - 10)
self.update_block(69, [tx])
self.send_blocks([b69], True)
self.save_spendable_output()
# Test spending the outpoint of a non-existent transaction
#
# -> b65 (19) -> b69 (20)
# \-> b70 (21)
#
self.log.info("Reject a block containing a transaction spending from a non-existent input")
self.move_tip(69)
b70 = self.next_block(70, spend=out[21])
bogus_tx = CTransaction()
bogus_tx.sha256 = uint256_from_str(b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c")
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff))
tx.vout.append(CTxOut(1, b""))
b70 = self.update_block(70, [tx])
self.send_blocks([b70], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
# Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks)
#
# -> b65 (19) -> b69 (20) -> b72 (21)
# \-> b71 (21)
#
# b72 is a good block.
# b71 is a copy of 72, but re-adds one of its transactions. However, it has the same hash as b72.
self.log.info("Reject a block containing a duplicate transaction but with the same Merkle root (Merkle tree malleability")
self.move_tip(69)
b72 = self.next_block(72)
tx1 = self.create_and_sign_transaction(out[21], 2)
tx2 = self.create_and_sign_transaction(tx1, 1)
b72 = self.update_block(72, [tx1, tx2]) # now tip is 72
b71 = copy.deepcopy(b72)
b71.vtx.append(tx2) # add duplicate tx2
self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1 # b71 builds off b69
self.blocks[71] = b71
assert_equal(len(b71.vtx), 4)
assert_equal(len(b72.vtx), 3)
assert_equal(b72.sha256, b71.sha256)
self.move_tip(71)
self.send_blocks([b71], success=False, reject_reason='bad-txns-duplicate', reconnect=True)
self.move_tip(72)
self.send_blocks([b72], True)
self.save_spendable_output()
# Test some invalid scripts and MAX_BLOCK_SIGOPS
#
# -> b69 (20) -> b72 (21)
# \-> b** (22)
#
# b73 - tx with excessive sigops that are placed after an excessively large script element.
# The purpose of the test is to make sure those sigops are counted.
#
# script is a bytearray of size 20,526
#
# bytearray[0-19,998] : OP_CHECKSIG
# bytearray[19,999] : OP_PUSHDATA4
# bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format)
        # bytearray[20,004-20,524]: unread data (script_element)
        # bytearray[20,525] : OP_CHECKSIG (this puts us over the limit)
self.log.info("Reject a block containing too many sigops after a large script element")
self.move_tip(72)
b73 = self.next_block(73)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS - 1] = int("4e", 16) # OP_PUSHDATA4
element_size = MAX_SCRIPT_ELEMENT_SIZE + 1
a[MAX_BLOCK_SIGOPS] = element_size % 256
a[MAX_BLOCK_SIGOPS + 1] = element_size // 256
a[MAX_BLOCK_SIGOPS + 2] = 0
a[MAX_BLOCK_SIGOPS + 3] = 0
tx = self.create_and_sign_transaction(out[22], 1, CScript(a))
b73 = self.update_block(73, [tx])
assert_equal(get_legacy_sigopcount_block(b73), MAX_BLOCK_SIGOPS + 1)
self.send_blocks([b73], success=False, reject_reason='bad-blk-sigops', reconnect=True)
# b74/75 - if we push an invalid script element, all previous sigops are counted,
# but sigops after the element are not counted.
#
# The invalid script element is that the push_data indicates that
# there will be a large amount of data (0xffffff bytes), but we only
# provide a much smaller number. These bytes are CHECKSIGS so they would
# cause b75 to fail for excessive sigops, if those bytes were counted.
#
# b74 fails because we put MAX_BLOCK_SIGOPS+1 before the element
# b75 succeeds because we put MAX_BLOCK_SIGOPS before the element
self.log.info("Check sigops are counted correctly after an invalid script element")
self.move_tip(72)
b74 = self.next_block(74)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS] = 0x4e
a[MAX_BLOCK_SIGOPS + 1] = 0xfe
a[MAX_BLOCK_SIGOPS + 2] = 0xff
a[MAX_BLOCK_SIGOPS + 3] = 0xff
a[MAX_BLOCK_SIGOPS + 4] = 0xff
tx = self.create_and_sign_transaction(out[22], 1, CScript(a))
b74 = self.update_block(74, [tx])
self.send_blocks([b74], success=False, reject_reason='bad-blk-sigops', reconnect=True)
self.move_tip(72)
b75 = self.next_block(75)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS - 1] = 0x4e
a[MAX_BLOCK_SIGOPS] = 0xff
a[MAX_BLOCK_SIGOPS + 1] = 0xff
a[MAX_BLOCK_SIGOPS + 2] = 0xff
a[MAX_BLOCK_SIGOPS + 3] = 0xff
tx = self.create_and_sign_transaction(out[22], 1, CScript(a))
b75 = self.update_block(75, [tx])
self.send_blocks([b75], True)
self.save_spendable_output()
# Check that if we push an element filled with CHECKSIGs, they are not counted
self.move_tip(75)
b76 = self.next_block(76)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS - 1] = 0x4e # PUSHDATA4, but leave the following bytes as just checksigs
tx = self.create_and_sign_transaction(out[23], 1, CScript(a))
b76 = self.update_block(76, [tx])
self.send_blocks([b76], True)
self.save_spendable_output()
# Test transaction resurrection
#
# -> b77 (24) -> b78 (25) -> b79 (26)
# \-> b80 (25) -> b81 (26) -> b82 (27)
#
# b78 creates a tx, which is spent in b79. After b82, both should be in mempool
#
# The tx'es must be unsigned and pass the node's mempool policy. It is unsigned for the
# rather obscure reason that the Python signature code does not distinguish between
# Low-S and High-S values (whereas the bsvcoin code has custom code which does so);
# as a result of which, the odds are 50% that the python code will use the right
# value and the transaction will be accepted into the mempool. Until we modify the
# test framework to support low-S signing, we are out of luck.
#
# To get around this issue, we construct transactions which are not signed and which
# spend to OP_TRUE. If the standard-ness rules change, this test would need to be
# updated. (Perhaps to spend to a P2SH OP_TRUE script)
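        # (Background: for ECDSA, (r, s) and (r, order - s) are both valid signatures, and
        # the node's standardness policy accepts only the low-S form, so a naive signer
        # hits the policy roughly half the time.)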
self.log.info("Test transaction resurrection during a re-org")
self.move_tip(76)
b77 = self.next_block(77)
tx77 = self.create_and_sign_transaction(out[24], 10 * COIN)
b77 = self.update_block(77, [tx77])
self.send_blocks([b77], True)
self.save_spendable_output()
b78 = self.next_block(78)
tx78 = self.create_tx(tx77, 0, 9 * COIN)
b78 = self.update_block(78, [tx78])
self.send_blocks([b78], True)
b79 = self.next_block(79)
tx79 = self.create_tx(tx78, 0, 8 * COIN)
b79 = self.update_block(79, [tx79])
self.send_blocks([b79], True)
# mempool should be empty
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.move_tip(77)
b80 = self.next_block(80, spend=out[25])
self.send_blocks([b80], False, force_send=True)
self.save_spendable_output()
b81 = self.next_block(81, spend=out[26])
self.send_blocks([b81], False, force_send=True) # other chain is same length
self.save_spendable_output()
b82 = self.next_block(82, spend=out[27])
self.send_blocks([b82], True) # now this chain is longer, triggers re-org
self.save_spendable_output()
# now check that tx78 and tx79 have been put back into the peer's mempool
mempool = self.nodes[0].getrawmempool()
assert_equal(len(mempool), 2)
assert tx78.hash in mempool
assert tx79.hash in mempool
# Test invalid opcodes in dead execution paths.
#
# -> b81 (26) -> b82 (27) -> b83 (28)
#
self.log.info("Accept a block with invalid opcodes in dead execution paths")
b83 = self.next_block(83)
op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF]
script = CScript(op_codes)
tx1 = self.create_and_sign_transaction(out[28], out[28].vout[0].nValue, script)
tx2 = self.create_and_sign_transaction(tx1, 0, CScript([OP_TRUE]))
tx2.vin[0].scriptSig = CScript([OP_FALSE])
tx2.rehash()
b83 = self.update_block(83, [tx1, tx2])
self.send_blocks([b83], True)
self.save_spendable_output()
# Reorg on/off blocks that have OP_RETURN in them (and try to spend them)
#
# -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31)
# \-> b85 (29) -> b86 (30) \-> b89a (32)
#
self.log.info("Test re-orging blocks with OP_RETURN in them")
b84 = self.next_block(84)
tx1 = self.create_tx(out[29], 0, 0, CScript([OP_RETURN]))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.calc_sha256()
self.sign_tx(tx1, out[29])
tx1.rehash()
tx2 = self.create_tx(tx1, 1, 0, CScript([OP_RETURN]))
tx2.vout.append(CTxOut(0, CScript([OP_RETURN])))
tx3 = self.create_tx(tx1, 2, 0, CScript([OP_RETURN]))
tx3.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx4 = self.create_tx(tx1, 3, 0, CScript([OP_TRUE]))
tx4.vout.append(CTxOut(0, CScript([OP_RETURN])))
tx5 = self.create_tx(tx1, 4, 0, CScript([OP_RETURN]))
b84 = self.update_block(84, [tx1, tx2, tx3, tx4, tx5])
self.send_blocks([b84], True)
self.save_spendable_output()
self.move_tip(83)
b85 = self.next_block(85, spend=out[29])
self.send_blocks([b85], False) # other chain is same length
b86 = self.next_block(86, spend=out[30])
self.send_blocks([b86], True)
self.move_tip(84)
b87 = self.next_block(87, spend=out[30])
self.send_blocks([b87], False) # other chain is same length
self.save_spendable_output()
b88 = self.next_block(88, spend=out[31])
self.send_blocks([b88], True)
self.save_spendable_output()
# trying to spend the OP_RETURN output is rejected
b89a = self.next_block("89a", spend=out[32])
tx = self.create_tx(tx1, 0, 0, CScript([OP_TRUE]))
b89a = self.update_block("89a", [tx])
self.send_blocks([b89a], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
self.log.info("Test a re-org of one week's worth of blocks (1088 blocks)")
self.move_tip(88)
LARGE_REORG_SIZE = 1088
blocks = []
spend = out[32]
for i in range(89, LARGE_REORG_SIZE + 89):
b = self.next_block(i, spend)
tx = CTransaction()
script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0)))
b = self.update_block(i, [tx])
assert_equal(len(b.serialize()), MAX_BLOCK_BASE_SIZE)
blocks.append(b)
self.save_spendable_output()
spend = self.get_spendable_output()
self.send_blocks(blocks, True, timeout=2440)
chain1_tip = i
# now create alt chain of same length
self.move_tip(88)
blocks2 = []
for i in range(89, LARGE_REORG_SIZE + 89):
blocks2.append(self.next_block("alt" + str(i)))
self.send_blocks(blocks2, False, force_send=True)
# extend alt chain to trigger re-org
block = self.next_block("alt" + str(chain1_tip + 1))
self.send_blocks([block], True, timeout=2440)
# ... and re-org back to the first chain
self.move_tip(chain1_tip)
block = self.next_block(chain1_tip + 1)
self.send_blocks([block], False, force_send=True)
block = self.next_block(chain1_tip + 2)
self.send_blocks([block], True, timeout=2440)
self.log.info("Reject a block with an invalid block header version")
b_v1 = self.next_block('b_v1', version=1)
self.send_blocks([b_v1], success=False, force_send=True, reject_reason='bad-version(0x00000001)', reconnect=True)
self.move_tip(chain1_tip + 2)
b_cb34 = self.next_block('b_cb34')
b_cb34.vtx[0].vin[0].scriptSig = b_cb34.vtx[0].vin[0].scriptSig[:-1]
b_cb34.vtx[0].rehash()
b_cb34.hashMerkleRoot = b_cb34.calc_merkle_root()
b_cb34.solve()
self.send_blocks([b_cb34], success=False, reject_reason='bad-cb-height', reconnect=True)
# Helper methods
################
def add_transactions_to_block(self, block, tx_list):
[tx.rehash() for tx in tx_list]
block.vtx.extend(tx_list)
# this is a little handier to use than the version in blocktools.py
def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])):
return create_tx_with_script(spend_tx, n, amount=value, script_pub_key=script)
# sign a transaction, using the key we know about
# this signs input 0 in tx, which is assumed to be spending output n in spend_tx
def sign_tx(self, tx, spend_tx):
scriptPubKey = bytearray(spend_tx.vout[0].scriptPubKey)
if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend
tx.vin[0].scriptSig = CScript()
return
(sighash, err) = LegacySignatureHash(spend_tx.vout[0].scriptPubKey, tx, 0, SIGHASH_ALL)
tx.vin[0].scriptSig = CScript([self.coinbase_key.sign_ecdsa(sighash) + bytes(bytearray([SIGHASH_ALL]))])
def create_and_sign_transaction(self, spend_tx, value, script=CScript([OP_TRUE])):
tx = self.create_tx(spend_tx, 0, value, script)
self.sign_tx(tx, spend_tx)
tx.rehash()
return tx
def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), *, version=4):
if self.tip is None:
base_block_hash = self.genesis_hash
block_time = int(time.time()) + 1
else:
base_block_hash = self.tip.sha256
block_time = self.tip.nTime + 1
# First create the coinbase
height = self.block_heights[base_block_hash] + 1
coinbase = create_coinbase(height, self.coinbase_pubkey)
coinbase.vout[0].nValue += additional_coinbase_value
coinbase.rehash()
if spend is None:
block = create_block(base_block_hash, coinbase, block_time, version=version)
else:
coinbase.vout[0].nValue += spend.vout[0].nValue - 1 # all but one satoshi to fees
coinbase.rehash()
block = create_block(base_block_hash, coinbase, block_time, version=version)
tx = self.create_tx(spend, 0, 1, script) # spend 1 satoshi
self.sign_tx(tx, spend)
self.add_transactions_to_block(block, [tx])
block.hashMerkleRoot = block.calc_merkle_root()
# Block is created. Find a valid nonce.
block.solve()
self.tip = block
self.block_heights[block.sha256] = height
assert number not in self.blocks
self.blocks[number] = block
return block
# save the current tip so it can be spent by a later block
def save_spendable_output(self):
self.log.debug(f"saving spendable output {self.tip.vtx[0]}")
self.spendable_outputs.append(self.tip)
# get an output that we previously marked as spendable
def get_spendable_output(self):
self.log.debug(f"getting spendable output {self.spendable_outputs[0].vtx[0]}")
return self.spendable_outputs.pop(0).vtx[0]
# move the tip back to a previous block
def move_tip(self, number):
self.tip = self.blocks[number]
# adds transactions to the block and updates state
def update_block(self, block_number, new_transactions):
block = self.blocks[block_number]
self.add_transactions_to_block(block, new_transactions)
old_sha256 = block.sha256
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
# Update the internal state just like in next_block
self.tip = block
if block.sha256 != old_sha256:
self.block_heights[block.sha256] = self.block_heights[old_sha256]
del self.block_heights[old_sha256]
self.blocks[block_number] = block
return block
def bootstrap_p2p(self, timeout=10):
"""Add a P2P connection to the node.
Helper to connect and wait for version handshake."""
self.helper_peer = self.nodes[0].add_p2p_connection(P2PDataStore())
# We need to wait for the initial getheaders from the peer before we
# start populating our blockstore. If we don't, then we may run ahead
# to the next subtest before we receive the getheaders. We'd then send
# an INV for the next block and receive two getheaders - one for the
# IBD and one for the INV. We'd respond to both and could get
# unexpectedly disconnected if the DoS score for that error is 50.
self.helper_peer.wait_for_getheaders(timeout=timeout)
def reconnect_p2p(self, timeout=60):
"""Tear down and bootstrap the P2P connection to the node.
The node gets disconnected several times in this test. This helper
method reconnects the p2p and restarts the network thread."""
self.nodes[0].disconnect_p2ps()
self.bootstrap_p2p(timeout=timeout)
def send_blocks(self, blocks, success=True, reject_reason=None, force_send=False, reconnect=False, timeout=960):
"""Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
Call with success = False if the tip shouldn't advance to the most recent block."""
self.helper_peer.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason, force_send=force_send, timeout=timeout, expect_disconnect=reconnect)
if reconnect:
self.reconnect_p2p(timeout=timeout)
if __name__ == '__main__':
FullBlockTest().main()
| 45.852609
| 183
| 0.593934
|
af38fa5a571d0819beeb283e9202f7c88428d851
| 3,363
|
py
|
Python
|
src/hltv_api/api/stats.py
|
hoangvu01/hltv_python
|
227947c7bfb2cbd475f1aefcda6447e677658820
|
[
"MIT"
] | 2
|
2021-12-22T16:10:40.000Z
|
2021-12-22T16:44:50.000Z
|
src/hltv_api/api/stats.py
|
hoangvu01/hltv_python
|
227947c7bfb2cbd475f1aefcda6447e677658820
|
[
"MIT"
] | null | null | null |
src/hltv_api/api/stats.py
|
hoangvu01/hltv_python
|
227947c7bfb2cbd475f1aefcda6447e677658820
|
[
"MIT"
] | null | null | null |
import os
from urllib.parse import urljoin
import pandas as pd
from lxml import html
from hltv_api.api.results import get_past_matches_ids
from hltv_api.client import HLTVClient
from hltv_api.common import HLTVConfig
from hltv_api.pages.matches import parse_match_page
from hltv_api.pages.stats import parse_map_stat_economy_page
from hltv_api.query import HLTVQuery
MATCH_COLUMNS = ["match_id", "map", "team_1_id", "team_2_id", "starting_ct"]
ROUNDS_COLUMNS = [col
for i in range(1, 31)
for col in [f"{i}_team_1_value", f"{i}_team_2_value", f"{i}_winner"]]
def get_matches_with_economy(skip=0, limit=None, batch_size=100, query=None, **kwargs):
"""Return a DataFrame containing
Parameter
---------
skip: Optional[int]
The number of results to be skipped from being returned.
If not specified, do not skip any records.
limit: Optional[int]
The maximum number of results to be returned.
        If None (the default), return all the records found. Results are
        fetched in batches of `batch_size` (default 100, the number of
        matches displayed per page on HLTV).
query: Optional[HLTVQuery]
Queries and filters for the data.
kwargs:
Arguments to `HLTVQuery` if `query` is `None`.
"""
query = query or HLTVQuery(**kwargs)
columns = MATCH_COLUMNS + ROUNDS_COLUMNS
df = pd.DataFrame(columns=columns)
while (limit is None) or (len(df) < limit):
batch_limit = batch_size if limit is None else min(batch_size, limit - len(df))
matches_ids = get_past_matches_ids(
skip=skip,
limit=batch_limit,
query=query
)
# Breaks if no result found
if len(matches_ids) == 0:
break
# Fetches match statistics using its ID
matches_stats = []
for match_id in matches_ids:
stats = get_economy_by_match_id(match_id)
if len(stats) == 0:
continue
for map_details in stats["maps"]:
pivoted = {**map_details, **stats}
matches_stats.append({k: v for k, v in pivoted.items() if k in columns})
df = df.append(matches_stats)
skip += len(matches_ids)
return df
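# A hedged usage sketch (not exercised anywhere in this module; it relies only on the
# parameters and column names defined above, and it performs live HTTP requests):
#
#   df = get_matches_with_economy(skip=0, limit=50)
#   print(df[["match_id", "map", "1_winner"]].head())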
def get_economy_by_match_id(match_id):
client = HLTVClient()
    # The URL path normally ends with the event slug, but any placeholder
    # (here "foo") works as long as the match ID is correct
match_id = str(match_id)
match_uri = os.path.join(HLTVConfig["matches_uri"], match_id, "foo")
match_url = urljoin(HLTVConfig["base_url"], match_uri)
response = client.get(match_url)
match_page = html.fromstring(response.text)
match_details = parse_match_page(match_page)
if match_details != {}:
match_details["maps"] = [{
**map_played,
**get_economy_by_map_stats_id(map_played["map_stats_id"])
} for map_played in match_details["maps"]]
return match_details
def get_economy_by_map_stats_id(map_stats_id):
client = HLTVClient()
map_stats_uri = os.path.join(HLTVConfig["economy_uri"], str(map_stats_id), "foo")
map_stats_url = urljoin(HLTVConfig["base_url"], map_stats_uri)
map_stat_response = client.get(map_stats_url)
tree = html.fromstring(map_stat_response.text)
return parse_map_stat_economy_page(tree)
| 31.138889
| 88
| 0.663693
|
2db5da5aeec7dd84dcbb8d33922cf052d7495c34
| 3,622
|
py
|
Python
|
tests/kernels/test_split.py
|
MammothStack/Lyteflow
|
63a60f593b954f8a224fcd4864fe6af82d20dd91
|
[
"MIT"
] | 1
|
2021-08-25T02:43:19.000Z
|
2021-08-25T02:43:19.000Z
|
tests/kernels/test_split.py
|
MammothStack/Lyteflow
|
63a60f593b954f8a224fcd4864fe6af82d20dd91
|
[
"MIT"
] | null | null | null |
tests/kernels/test_split.py
|
MammothStack/Lyteflow
|
63a60f593b954f8a224fcd4864fe6af82d20dd91
|
[
"MIT"
] | null | null | null |
# Third party imports
import pytest
import warnings
# Local imports
from lyteflow.construct import PipeSystem
from lyteflow.kernels.split import *
from lyteflow.kernels.io import *
from lyteflow.kernels.merge import *
@pytest.fixture()
def dupe_pipe():
in_1 = Inlet(convert=True, name="in_2")
x = Duplicator(n_result=3)(in_1)
x = Concatenator(axis=1)(x)
out_1 = Outlet(name="out_2")(x)
return PipeSystem(inlets=[in_1], outlets=[out_1], name="ps")
@pytest.fixture()
def dupe_pipe_invalid():
in_1 = Inlet(convert=True, name="in_2")
x = Duplicator(n_result=3)(in_1)
x = Concatenator(axis=1)(x, x)
out_1 = Outlet(name="out_2")(x)
return PipeSystem(inlets=[in_1], outlets=[out_1])
@pytest.fixture()
def simple_data_frame():
return pd.DataFrame([[1, 2], [3, 4], [5, 6]], columns=["a", "b"])
@pytest.fixture()
def data_frame():
return pd.DataFrame([[1, 2, 3, 4], [1, 2, 3, 4]], columns=["a", "b", "c", "d"])
class TestDuplicator:
def test_transform(self, simple_data_frame):
expected = pd.DataFrame([[1, 2], [3, 4], [5, 6]], columns=["a", "b"])
res = Duplicator(n_result=2, name="dupe").transform(simple_data_frame)
assert all([(i == expected).all().all() for i in res])
def test_init_invalid_n_result(self):
with pytest.raises(ValueError):
Duplicator(n_result=0, name="dupe")
def test_init_valid_n_result(self):
Duplicator(n_result=None, name="dupe")
def test_init_invalid_n_result_1(self):
with pytest.raises(TypeError):
Duplicator(n_result="a", name="dupe")
def test_valid_setup(self, dupe_pipe, simple_data_frame):
expected = pd.DataFrame(
[[1, 2, 1, 2, 1, 2], [3, 4, 3, 4, 3, 4], [5, 6, 5, 6, 5, 6]],
columns=["a", "b", "a", "b", "a", "b"],
)
assert (expected == dupe_pipe.flow(simple_data_frame)[0]).all().all()
def test_invalid_setup(self, dupe_pipe_invalid, simple_data_frame):
with pytest.warns(UserWarning):
dupe_pipe_invalid.flow(simple_data_frame)
def test_invalid_setup_1(self, dupe_pipe_invalid, simple_data_frame):
expected = pd.DataFrame(
[[1, 2, 1, 2], [3, 4, 3, 4], [5, 6, 5, 6]], columns=["a", "b", "a", "b"]
)
assert (expected == dupe_pipe_invalid.flow(simple_data_frame)[0]).all().all()
class TestColumnSplitter:
def test_standard_split(self, simple_data_frame):
cs = ColumnSplitter(columns=["a"])
expected = pd.DataFrame([[1], [3], [5]], columns=["a"])
assert (expected == cs.transform(simple_data_frame)[0]).all().all()
def test_standard_split_split_rest(self, simple_data_frame):
cs = ColumnSplitter(columns=["a"], split_rest=True)
expected_a = pd.DataFrame([[1], [3], [5]], columns=["a"])
expected_b = pd.DataFrame([[2], [4], [6]], columns=["b"])
tr = cs.transform(simple_data_frame)
assert (expected_a == tr[0]).all().all() and (expected_b == tr[1]).all().all()
def test_split_non_existent_columns(self, data_frame):
cs = ColumnSplitter(columns=[["a", "b"], ["d", "f"]], split_rest=True)
expected_a = pd.DataFrame([[1, 2], [1, 2]], columns=["a", "b"])
expected_b = pd.DataFrame([[4], [4]], columns=["d"])
tr = cs.transform(data_frame)
assert (expected_a == tr[0]).all().all() and (expected_b == tr[1]).all().all()
def test_split_no_columns(self, data_frame):
cs = ColumnSplitter(["e"])
expected = pd.DataFrame(index=[0, 1])
assert (expected == cs.transform(data_frame)[0]).all().all()
| 36.585859
| 86
| 0.615406
|
4cf223002be3c7fda49c43b0228b48ca61715157
| 442
|
py
|
Python
|
PyOpenGL-3.0.2/OpenGL/raw/GL/SUN/convolution_border_modes.py
|
frederica07/Dragon_Programming_Process
|
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
|
[
"BSD-2-Clause"
] | null | null | null |
PyOpenGL-3.0.2/OpenGL/raw/GL/SUN/convolution_border_modes.py
|
frederica07/Dragon_Programming_Process
|
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
|
[
"BSD-2-Clause"
] | null | null | null |
PyOpenGL-3.0.2/OpenGL/raw/GL/SUN/convolution_border_modes.py
|
frederica07/Dragon_Programming_Process
|
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
|
[
"BSD-2-Clause"
] | null | null | null |
'''Autogenerated by get_gl_extensions script, do not edit!'''
from OpenGL import platform as _p
from OpenGL.GL import glget
EXTENSION_NAME = 'GL_SUN_convolution_border_modes'
_p.unpack_constants( """GL_WRAP_BORDER_SUN 0x81D4""", globals())
def glInitConvolutionBorderModesSUN():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( EXTENSION_NAME )
| 36.833333
| 71
| 0.791855
|
d234d720f590bb37d0ffc5fdedb1a62b7085b3b7
| 303
|
py
|
Python
|
Python/write-a-function/write-a-function.py
|
rutendo-nyakutira/HackerRankSolutions
|
8aa3f922b8b2e06138ce1fc348b7634576b02b1f
|
[
"MIT"
] | null | null | null |
Python/write-a-function/write-a-function.py
|
rutendo-nyakutira/HackerRankSolutions
|
8aa3f922b8b2e06138ce1fc348b7634576b02b1f
|
[
"MIT"
] | null | null | null |
Python/write-a-function/write-a-function.py
|
rutendo-nyakutira/HackerRankSolutions
|
8aa3f922b8b2e06138ce1fc348b7634576b02b1f
|
[
"MIT"
] | null | null | null |
def is_leap(year):
leap = False
    if year % 4 == 0:
        if year % 100 == 0:
            if year % 400 == 0:
leap = True
else:
leap = False
else:
leap = True
return leap
year = int(input())
print(is_leap(year))
| 18.9375
| 30
| 0.40264
|
4835f4e7463cfef7b5f7d198e3cbc413b0995572
| 1,009
|
py
|
Python
|
setup.py
|
lstillwe/Flask-GoogleMaps
|
a517fd7fc54f9438df524b80436697bef20a80d5
|
[
"MIT"
] | 1
|
2020-08-12T20:32:43.000Z
|
2020-08-12T20:32:43.000Z
|
setup.py
|
lstillwe/Flask-GoogleMaps
|
a517fd7fc54f9438df524b80436697bef20a80d5
|
[
"MIT"
] | null | null | null |
setup.py
|
lstillwe/Flask-GoogleMaps
|
a517fd7fc54f9438df524b80436697bef20a80d5
|
[
"MIT"
] | null | null | null |
# coding: utf8
from setuptools import setup, find_packages
setup(
name='Flask-GoogleMaps',
version='0.2.5',
license='MIT',
description='Small extension for Flask to make using Google Maps easy',
long_description=open('README.md').read(),
author='Bruno Rocha',
author_email='rochacbruno@gmail.com',
url='https://github.com/rochacbruno/Flask-GoogleMaps/',
platforms='any',
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
install_requires=['Flask'],
packages=find_packages()
)
| 32.548387
| 75
| 0.641229
|
0aeff6464afcdf619b2dc8172460e61069c54bfb
| 1,865
|
py
|
Python
|
test.py
|
StuSe/Multimodal-Shape-Completion
|
f053d0babdd223a7511911b8682afb80fbf42823
|
[
"MIT"
] | 75
|
2020-07-04T22:26:29.000Z
|
2022-03-28T06:07:50.000Z
|
test.py
|
StuSe/Multimodal-Shape-Completion
|
f053d0babdd223a7511911b8682afb80fbf42823
|
[
"MIT"
] | 7
|
2020-08-13T16:06:00.000Z
|
2021-07-30T02:07:44.000Z
|
test.py
|
StuSe/Multimodal-Shape-Completion
|
f053d0babdd223a7511911b8682afb80fbf42823
|
[
"MIT"
] | 11
|
2020-07-05T02:35:22.000Z
|
2022-01-18T10:52:21.000Z
|
from tqdm import tqdm
import torch
import os
from dataset import get_dataloader
from common import get_config
from agent import get_agent
from util.pc_utils import write_ply
from util.utils import cycle, ensure_dir
import random
random.seed(1856)
def main():
# create experiment config containing all hyperparameters
config = get_config('test')
# create network and training agent
tr_agent = get_agent(config)
# load from checkpoint
tr_agent.load_ckpt(config.ckpt)
tr_agent.eval()
# create dataloader
config.batch_size = 1
config.num_workers = 1
test_loader = get_dataloader('test', config)
num_test = len(test_loader)
print("total number of test samples: {}".format(num_test))
used_test = num_test if config.num_sample == -1 else config.num_sample
print("used number of test samples: {}".format(used_test))
test_loader = cycle(test_loader)
save_dir = os.path.join(config.exp_dir, "results/ckpt-{}-n{}-z{}".format(config.ckpt, used_test, config.num_z))
ensure_dir(save_dir)
# run
for i in tqdm(range(used_test)):
data = next(test_loader)
for j in range(config.num_z):
with torch.no_grad():
tr_agent.forward(data)
real_pts, fake_pts, raw_pts = tr_agent.get_point_cloud()
raw_id = data['raw_id'][0].split('.')[0]
save_sample_dir = os.path.join(save_dir, "{}".format(raw_id))
ensure_dir(save_sample_dir)
# save input partial shape
if j == 0:
save_path = os.path.join(save_sample_dir, "raw.ply")
write_ply(raw_pts[0], save_path)
# save completed shape
save_path = os.path.join(save_sample_dir, "fake-z{}.ply".format(j))
write_ply(fake_pts[0], save_path)
if __name__ == '__main__':
main()
| 30.080645
| 115
| 0.653619
|
4ce4c742406652d6eee4b0667c64c591e2fb152e
| 1,619
|
py
|
Python
|
setup.py
|
peterk87/guppywuppy
|
0d5ecdaa9c113a7c1a730e368ec319bcee38c05f
|
[
"MIT"
] | null | null | null |
setup.py
|
peterk87/guppywuppy
|
0d5ecdaa9c113a7c1a730e368ec319bcee38c05f
|
[
"MIT"
] | null | null | null |
setup.py
|
peterk87/guppywuppy
|
0d5ecdaa9c113a7c1a730e368ec319bcee38c05f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""The setup script."""
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = ['Click>=7.0',
'pyguppyclient',
'sanic',
'databases']
setup_requirements = ['pytest-runner', ]
test_requirements = ['pytest>=3', ]
setup(
author="Peter Kruczkiewicz",
author_email='peter.kruczkiewicz@gmail.com',
python_requires='>=3.6',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
description="Service wrapping PyGuppyClient",
entry_points={
'console_scripts': [
'guppywuppy=guppywuppy.cli:main',
],
},
install_requires=requirements,
license="MIT license",
long_description=readme + '\n\n' + history,
include_package_data=True,
keywords='guppywuppy',
name='guppywuppy',
packages=find_packages(include=['guppywuppy', 'guppywuppy.*']),
setup_requires=setup_requirements,
test_suite='tests',
tests_require=test_requirements,
url='https://github.com/peterk87/guppywuppy',
version='0.1.0',
zip_safe=False,
)
| 28.403509
| 67
| 0.623842
|
49582821d8be412a43a6568f74732364170945b0
| 313
|
py
|
Python
|
Chapter15/ex15.py
|
banerjeesamrat/LearnPythonTheHardWay.py
|
2f97c4060c0b13222db8991c3dda743bdafcad58
|
[
"MIT"
] | 2
|
2018-05-22T14:00:31.000Z
|
2022-01-17T20:20:00.000Z
|
Chapter15/ex15.py
|
banerjeesamrat/LearnPythonTheHardWay.py
|
2f97c4060c0b13222db8991c3dda743bdafcad58
|
[
"MIT"
] | null | null | null |
Chapter15/ex15.py
|
banerjeesamrat/LearnPythonTheHardWay.py
|
2f97c4060c0b13222db8991c3dda743bdafcad58
|
[
"MIT"
] | null | null | null |
"""
Author: Samrat Banerjee
Date: 25-05-2018
Description: Reading Files
"""
from sys import argv
script, filename = argv
txt=open(filename)
print("Here's your file %r:" %filename)
print(txt.read())
print("Type the filename again:")
file_again=input("> ")
txt_again=open(file_again)
print(txt_again.read())
| 12.52
| 39
| 0.71885
|
078d2e30ae6c4165c05e5c74471d6836b88cf3c4
| 7,045
|
py
|
Python
|
gigalixir/observer.py
|
oleks/gigalixir-cli
|
d1b1c303e24be548ddc895165e34652c378f4347
|
[
"MIT"
] | 52
|
2017-11-15T09:05:36.000Z
|
2021-11-29T07:58:47.000Z
|
gigalixir/observer.py
|
oleks/gigalixir-cli
|
d1b1c303e24be548ddc895165e34652c378f4347
|
[
"MIT"
] | 76
|
2017-08-17T16:58:48.000Z
|
2022-03-11T11:01:15.000Z
|
gigalixir/observer.py
|
oleks/gigalixir-cli
|
d1b1c303e24be548ddc895165e34652c378f4347
|
[
"MIT"
] | 50
|
2017-06-21T19:51:56.000Z
|
2022-01-19T01:58:19.000Z
|
import logging
import signal
import os
import urllib
import json
import re
import uuid
import requests
import sys
import subprocess
import time
from .shell import cast, call
from . import app as gigalixir_app
from . import auth
from six.moves.urllib.parse import quote
def observer(ctx, app_name, erlang_cookie, ssh_opts, ssh_cmd):
if not ctx.obj['router'].supports_multiplexing():
raise Exception("The observer command is not supported on this platform.")
host = ctx.obj['host']
r = requests.get('%s/api/apps/%s/observer-commands' % (host, quote(app_name.encode('utf-8'))), headers = {
'Content-Type': 'application/json',
})
if r.status_code != 200:
if r.status_code == 401:
raise auth.AuthException()
raise Exception(r.text)
else:
command = json.loads(r.text)["data"]
get_cookie_command = command["get_cookie"]
get_node_name_command = command["get_node_name"]
r = requests.get('%s/api/apps/%s/ssh_ip' % (host, quote(app_name.encode('utf-8'))), headers = {
'Content-Type': 'application/json',
})
if r.status_code != 200:
if r.status_code == 401:
raise auth.AuthException()
raise Exception(r.text)
else:
data = json.loads(r.text)["data"]
ssh_ip = data["ssh_ip"]
ssh_master_pid = None
control_path = "/tmp/gigalixir-cm-%s" % uuid.uuid4()
ssh_opts += " -S %s" % control_path
try:
logging.getLogger("gigalixir-cli").info("Setting up SSH multiplexing master")
cmd = "".join(["ssh %s" % (ssh_opts), " root@%s -N -M" % (ssh_ip)])
ssh_master_pid = subprocess.Popen(cmd.split()).pid
# wait for ssh master to connect
logging.getLogger("gigalixir-cli").info("Waiting for SSH multiplexing master")
time.sleep(5)
logging.getLogger("gigalixir-cli").info("Fetching erlang cookie")
if erlang_cookie is None:
ERLANG_COOKIE = gigalixir_app.distillery_eval(host, app_name, ssh_opts, ssh_cmd, get_cookie_command).strip("'")
else:
ERLANG_COOKIE = erlang_cookie
logging.getLogger("gigalixir-cli").info("Using erlang cookie: %s" % ERLANG_COOKIE)
logging.getLogger("gigalixir-cli").info("Fetching pod ip")
node_name = gigalixir_app.distillery_eval(host, app_name, ssh_opts, ssh_cmd, get_node_name_command)
# node_name is surrounded with single quotes
(sname, MY_POD_IP) = node_name.strip("'").split('@')
logging.getLogger("gigalixir-cli").info("Using pod ip: %s" % MY_POD_IP)
logging.getLogger("gigalixir-cli").info("Using node name: %s" % sname)
logging.getLogger("gigalixir-cli").info("Fetching epmd port and app port.")
output = gigalixir_app.ssh_helper(host, app_name, ssh_opts, ssh_cmd, True, "--", "epmd", "-names")
EPMD_PORT=None
APP_PORT=None
for line in output.splitlines():
match = re.match(r"^epmd: up and running on port (\d+) with data:$", line)
if match:
EPMD_PORT = match.groups()[0]
match = re.match(r"^name (.+) at port (\d+)$", line)
if match:
APP_PORT = match.groups()[1]
if EPMD_PORT == None:
raise Exception("EPMD_PORT not found.")
if APP_PORT == None:
raise Exception("APP_PORT not found.")
ensure_port_free(EPMD_PORT)
ensure_port_free(APP_PORT)
try:
logging.getLogger("gigalixir-cli").info("Setting up SSH tunnel for ports %s and %s" % (APP_PORT, EPMD_PORT))
cmd = "".join(["ssh %s -O forward -L %s" % (ssh_opts, APP_PORT), ":localhost:", "%s -L %s" % (APP_PORT, EPMD_PORT), ":localhost:", "%s root@%s" % (EPMD_PORT, ssh_ip)])
cast(cmd)
# no need to route if pod ip is 127.0.0.1
logging.getLogger("gigalixir-cli").info("Routing %s to 127.0.0.1" % MY_POD_IP)
ctx.obj['router'].route_to_localhost(MY_POD_IP, EPMD_PORT, APP_PORT)
name = uuid.uuid4()
# cmd = "iex --name %(name)s@%(MY_POD_IP)s --cookie %(ERLANG_COOKIE)s --hidden -e ':observer.start()'" % {"name": name, "MY_POD_IP": MY_POD_IP, "ERLANG_COOKIE": ERLANG_COOKIE}
cmd = "erl -name %(name)s@%(MY_POD_IP)s -setcookie %(ERLANG_COOKIE)s -hidden -run observer" % {"name": name, "MY_POD_IP": MY_POD_IP, "ERLANG_COOKIE": ERLANG_COOKIE}
logging.getLogger("gigalixir-cli").info("Running observer using: %s" % cmd)
logging.getLogger("gigalixir-cli").info("")
logging.getLogger("gigalixir-cli").info("")
logging.getLogger("gigalixir-cli").info("============")
logging.getLogger("gigalixir-cli").info("Instructions")
logging.getLogger("gigalixir-cli").info("============")
logging.getLogger("gigalixir-cli").info("In the 'Node' menu, click 'Connect Node'" )
logging.getLogger("gigalixir-cli").info("enter: %(sname)s@%(MY_POD_IP)s" % {"sname": sname, "MY_POD_IP": MY_POD_IP})
logging.getLogger("gigalixir-cli").info("and press OK.")
logging.getLogger("gigalixir-cli").info("")
logging.getLogger("gigalixir-cli").info("")
cast(cmd)
finally:
logging.getLogger("gigalixir-cli").info("Cleaning up route from %s to 127.0.0.1" % MY_POD_IP)
ctx.obj['router'].unroute_to_localhost(MY_POD_IP)
finally:
if ssh_master_pid:
# Needed because Ctrl-G -> q leaves it orphaned for some reason. is the subprocess
# not sent a signal on graceful termination?
logging.getLogger("gigalixir-cli").info("Cleaning up SSH multiplexing master")
try:
os.kill(ssh_master_pid, signal.SIGTERM)
except OSError:
# race condition if parent process tries to clean up subprocesses at the same
# time
pass
if os.path.exists(control_path):
logging.getLogger("gigalixir-cli").info("Deleting SSH multiplexing file")
try:
os.remove(control_path)
except OSError:
# race condition if ssh and we try to clean up the file at the same time
pass
def ensure_port_free(port):
try:
# if the port is in use, then a pid is found, this "succeeds" and continues
# if the port is free, then a pid is not found, this "fails" and raises a CalledProcessError
pid = call("lsof -wni tcp:%(port)s -t" % {"port": port})
# If multiplexing gets supported later, on Windows this command would be:
# pid = call("netstat -p tcp -n | find \"\"\":%(port)s\"\"\" % {"port": port})
raise Exception("It looks like process %s is using port %s on your local machine. We need this port to be able to connect observer. Please kill this process on your local machine and try again. e.g. `kill %s`" % (pid, port, pid))
except subprocess.CalledProcessError:
# success! continue
pass
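# --- Illustrative sketch (not part of gigalixir-cli): a rough, stdlib-only way to
# check whether a local TCP port is already bound, as an alternative to shelling
# out to lsof. The helper name and exact behaviour here are assumptions.
import socket

def port_in_use(port):
    """Return True if binding the port on localhost fails, i.e. it looks taken."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.bind(("127.0.0.1", int(port)))
        return False
    except (OSError, socket.error):
        return True
    finally:
        s.close()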
| 48.253425
| 237
| 0.613911
|
8c9621639063d7b3499320f614cd0dc173d0c40f
| 8,492
|
py
|
Python
|
wavefront_api_client/models/paged_saved_search.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
wavefront_api_client/models/paged_saved_search.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
wavefront_api_client/models/paged_saved_search.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from wavefront_api_client.configuration import Configuration
class PagedSavedSearch(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'cursor': 'str',
'items': 'list[SavedSearch]',
'limit': 'int',
'more_items': 'bool',
'offset': 'int',
'sort': 'Sorting',
'total_items': 'int'
}
attribute_map = {
'cursor': 'cursor',
'items': 'items',
'limit': 'limit',
'more_items': 'moreItems',
'offset': 'offset',
'sort': 'sort',
'total_items': 'totalItems'
}
def __init__(self, cursor=None, items=None, limit=None, more_items=None, offset=None, sort=None, total_items=None, _configuration=None): # noqa: E501
"""PagedSavedSearch - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._cursor = None
self._items = None
self._limit = None
self._more_items = None
self._offset = None
self._sort = None
self._total_items = None
self.discriminator = None
if cursor is not None:
self.cursor = cursor
if items is not None:
self.items = items
if limit is not None:
self.limit = limit
if more_items is not None:
self.more_items = more_items
if offset is not None:
self.offset = offset
if sort is not None:
self.sort = sort
if total_items is not None:
self.total_items = total_items
@property
def cursor(self):
"""Gets the cursor of this PagedSavedSearch. # noqa: E501
The id at which the current (limited) search can be continued to obtain more matching items # noqa: E501
:return: The cursor of this PagedSavedSearch. # noqa: E501
:rtype: str
"""
return self._cursor
@cursor.setter
def cursor(self, cursor):
"""Sets the cursor of this PagedSavedSearch.
The id at which the current (limited) search can be continued to obtain more matching items # noqa: E501
:param cursor: The cursor of this PagedSavedSearch. # noqa: E501
:type: str
"""
self._cursor = cursor
@property
def items(self):
"""Gets the items of this PagedSavedSearch. # noqa: E501
List of requested items # noqa: E501
:return: The items of this PagedSavedSearch. # noqa: E501
:rtype: list[SavedSearch]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this PagedSavedSearch.
List of requested items # noqa: E501
:param items: The items of this PagedSavedSearch. # noqa: E501
:type: list[SavedSearch]
"""
self._items = items
@property
def limit(self):
"""Gets the limit of this PagedSavedSearch. # noqa: E501
:return: The limit of this PagedSavedSearch. # noqa: E501
:rtype: int
"""
return self._limit
@limit.setter
def limit(self, limit):
"""Sets the limit of this PagedSavedSearch.
:param limit: The limit of this PagedSavedSearch. # noqa: E501
:type: int
"""
self._limit = limit
@property
def more_items(self):
"""Gets the more_items of this PagedSavedSearch. # noqa: E501
Whether more items are available for return by increment offset or cursor # noqa: E501
:return: The more_items of this PagedSavedSearch. # noqa: E501
:rtype: bool
"""
return self._more_items
@more_items.setter
def more_items(self, more_items):
"""Sets the more_items of this PagedSavedSearch.
Whether more items are available for return by increment offset or cursor # noqa: E501
:param more_items: The more_items of this PagedSavedSearch. # noqa: E501
:type: bool
"""
self._more_items = more_items
@property
def offset(self):
"""Gets the offset of this PagedSavedSearch. # noqa: E501
:return: The offset of this PagedSavedSearch. # noqa: E501
:rtype: int
"""
return self._offset
@offset.setter
def offset(self, offset):
"""Sets the offset of this PagedSavedSearch.
:param offset: The offset of this PagedSavedSearch. # noqa: E501
:type: int
"""
self._offset = offset
@property
def sort(self):
"""Gets the sort of this PagedSavedSearch. # noqa: E501
:return: The sort of this PagedSavedSearch. # noqa: E501
:rtype: Sorting
"""
return self._sort
@sort.setter
def sort(self, sort):
"""Sets the sort of this PagedSavedSearch.
:param sort: The sort of this PagedSavedSearch. # noqa: E501
:type: Sorting
"""
self._sort = sort
@property
def total_items(self):
"""Gets the total_items of this PagedSavedSearch. # noqa: E501
An estimate (lower-bound) of the total number of items available for return. May not be a tight estimate for facet queries # noqa: E501
:return: The total_items of this PagedSavedSearch. # noqa: E501
:rtype: int
"""
return self._total_items
@total_items.setter
def total_items(self, total_items):
"""Sets the total_items of this PagedSavedSearch.
An estimate (lower-bound) of the total number of items available for return. May not be a tight estimate for facet queries # noqa: E501
:param total_items: The total_items of this PagedSavedSearch. # noqa: E501
:type: int
"""
self._total_items = total_items
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(PagedSavedSearch, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PagedSavedSearch):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, PagedSavedSearch):
return True
return self.to_dict() != other.to_dict()
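# --- Illustrative sketch (not part of the generated module): building the model
# by keyword and dumping it for inspection. The keyword names come from the
# swagger_types/attribute_map above; the values are placeholders.
if __name__ == "__main__":
    page = PagedSavedSearch(limit=100, offset=0, more_items=False, total_items=0, items=[])
    print(page.to_str())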
| 29.486111
| 409
| 0.593264
|
dcea7e058f217b4cdbce11e5bcd37673fb5e92b7
| 3,013
|
py
|
Python
|
backend/workspaces/Jobs/views/jobList.py
|
makakken/roseguarden
|
9a867f3d5e979b990bf474dcba81e5e9d0814c6a
|
[
"MIT"
] | null | null | null |
backend/workspaces/Jobs/views/jobList.py
|
makakken/roseguarden
|
9a867f3d5e979b990bf474dcba81e5e9d0814c6a
|
[
"MIT"
] | 50
|
2021-03-28T03:06:19.000Z
|
2021-10-18T12:36:16.000Z
|
backend/workspaces/Jobs/views/jobList.py
|
makakken/roseguarden
|
9a867f3d5e979b990bf474dcba81e5e9d0814c6a
|
[
"MIT"
] | 1
|
2021-07-30T07:12:46.000Z
|
2021-07-30T07:12:46.000Z
|
"""
The roseguarden project
Copyright (C) 2018-2020 Marcus Drobisch,
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
"""
__authors__ = ["Marcus Drobisch"]
__contact__ = "roseguarden@fabba.space"
__credits__ = []
__license__ = "GPLv3"
from core.workspaces.workspace import Workspace
from core.workspaces.dataView import DataView
from core.jobs import jobManager
from core.users.models import User
""" A view contaning a list of permissions
"""
class JobList(DataView):
uri = 'jobList'
requireLogin = True
# def __init__(self):
# super().__init__(name='PermissionList', uri ='permissionList')
def defineProperties(self):
self.addStringProperty(name='id', label='ID', isKey=True)
self.addStringProperty(name='workspace', label='Workspace')
self.addStringProperty(name='name', label='Name')
self.addStringProperty(name='trigger', label='Triggered')
self.addStringProperty(name='log', label='Log')
self.addStringProperty(name='need_parameters', label='Parameters')
self.addStringProperty(name='description', label='Description')
def getViewHandler(self, user: User, workspace: Workspace, query=None):
print("getDataViewHandler for JobList")
entrylist = []
all_jobs = jobManager.get_jobs()
for key, j in all_jobs.items():
# get new empty entry
entry = self.createEntry()
# fill entry
entry.id = key
entry.name = j.name
entry.workspace = j.workspace
entry.description = j.description
entry.trigger = j.trigger
if j.parameters is None:
entry.need_parameters = "No"
else:
entry.need_parameters = "Yes"
if j.log_in_db:
entry.log = "Yes"
else:
entry.log = "No"
entrylist.append(entry.extract())
return entrylist
def __repr__(self):
return '<{} with {} properties>'.format(self.name, len(self.properties))
# Handler for a request to create a new view entry
def createViewEntryHandler(self, user, workspace, entry):
print("Handle createViewEntry request for " + self.uri)
# Handler for a request to update a single view entry
def updateViewEntryHandler(self, user, workspace, key, entry):
print("Handle updateViewEntryHandler request for " + self.uri)
| 34.632184
| 80
| 0.668769
|
03b64ed7c278c96f2c6d5c37474ae7716fe225d3
| 8,459
|
py
|
Python
|
reviews/google_structured_data.py
|
UrbanBogger/horrorexplosion
|
3698e00a6899a5e8b224cd3d1259c3deb3a2ca80
|
[
"MIT"
] | null | null | null |
reviews/google_structured_data.py
|
UrbanBogger/horrorexplosion
|
3698e00a6899a5e8b224cd3d1259c3deb3a2ca80
|
[
"MIT"
] | 4
|
2020-06-05T18:21:18.000Z
|
2021-06-10T20:17:31.000Z
|
reviews/google_structured_data.py
|
UrbanBogger/horrorexplosion
|
3698e00a6899a5e8b224cd3d1259c3deb3a2ca80
|
[
"MIT"
] | null | null | null |
from .models import return_mov_participation_data
MS_REVIEW_AUTHOR = 'Mitch Sokolov'
DD_REVIEW_AUTHOR = 'Dave Dukowski'
ORGANIZATION_NAME = 'The Horror Explosion'
ORGANIZATION_HOME_URL = 'http://www.horrorexplosion.com/'
CONTRIBUTORS_URL = 'http://www.horrorexplosion.com/reviews/contributors/'
CONTEXT = 'https://schema.org'
CONTEXT_KEY = '@context'
TYPE_KEY = '@type'
REVIEW_TYPE = 'Review'
PERSON_TYPE = 'Person'
ORGANISATION_TYPE = 'Organization'
MOVIE_TYPE = 'Movie'
TV_SERIES_TYPE = 'TVSeries'
TV_SEASON_TYPE = 'TVSeason'
TV_EPISODE_TYPE = 'TVEpisode'
def get_review_author(review):
review_author = ''
if str(review.review_author) == 'M.S.':
review_author = MS_REVIEW_AUTHOR
elif str(review.review_author) == 'D.D.':
review_author = DD_REVIEW_AUTHOR
return review_author
def get_director_key(directors):
dir_key = None
director = None
if directors and len(directors) == 1:
dir_key = 'director'
director = directors[0]
elif directors and len(directors) > 1:
dir_key = 'directors'
director = directors
return dir_key, director
def mov_review_sd(mov_rev, db_object_absolute_url=''):
review_author = get_review_author(mov_rev)
mov_directors = [str(mov_participation.person) for mov_participation in
return_mov_participation_data(
mov_rev.reviewed_movie, 'Director')]
mov_cast = [str(mov_participation.person) for mov_participation in
return_mov_participation_data(
mov_rev.reviewed_movie, 'Actor')]
dir_key, director = get_director_key(mov_directors)
country_of_origin = [str(country) for country in
mov_rev.reviewed_movie.country_of_origin.all()]
if country_of_origin and len(country_of_origin) == 1:
country_of_origin = country_of_origin[0]
if not (mov_cast and director and country_of_origin):
return None
structured_data = {
CONTEXT_KEY: CONTEXT,
TYPE_KEY: REVIEW_TYPE,
'author': {TYPE_KEY: PERSON_TYPE,
'name': review_author,
'sameAs': CONTRIBUTORS_URL},
'url': db_object_absolute_url,
'datePublished': mov_rev.first_created.isoformat(),
'publisher': {TYPE_KEY: ORGANISATION_TYPE,
'name': ORGANIZATION_NAME,
'sameAs': ORGANIZATION_HOME_URL},
'description': mov_rev.mov_review_page_description,
'inLanguage': 'en',
'itemReviewed': {TYPE_KEY: MOVIE_TYPE,
'name': str(mov_rev.reviewed_movie.main_title),
'sameAs': mov_rev.reviewed_movie.imdb_link,
'image': mov_rev.reviewed_movie.poster.url,
dir_key: director,
'actors': mov_cast,
'countryOfOrigin': country_of_origin,
'dateCreated': str(
mov_rev.reviewed_movie.year_of_release),
},
'reviewRating': {TYPE_KEY: 'Rating',
'worstRating': 0.5,
'bestRating': 4.0,
'ratingValue': float(mov_rev.grade.grade_numerical)},
'reviewBody': mov_rev.review_snippet
}
return structured_data
def mov_sd(movie):
mov_directors = [str(mov_participation.person) for mov_participation in
return_mov_participation_data(movie, 'Director')]
mov_cast = [str(mov_participation.person) for mov_participation in
return_mov_participation_data(movie, 'Actor')]
dir_key, director = get_director_key(mov_directors)
country_of_origin = [str(country) for country in
movie.country_of_origin.all()]
if country_of_origin and len(country_of_origin) == 1:
country_of_origin = country_of_origin[0]
if not (mov_cast and director and country_of_origin):
return None
structured_data = {
CONTEXT_KEY: CONTEXT,
TYPE_KEY: MOVIE_TYPE,
'name': str(movie.main_title),
'sameAs': movie.imdb_link,
'image': movie.poster.url,
dir_key: director,
'actors': mov_cast,
'countryOfOrigin': country_of_origin,
'dateCreated': str(movie.year_of_release)
}
return structured_data
def tv_episode_rev_sd(tv_episode_rev, db_object_absolute_url=''):
grade = None
review_author = get_review_author(tv_episode_rev)
print('MOV PARTICIPATIONS: ' + str(return_mov_participation_data(
tv_episode_rev.reviewed_tv_episode, 'Director')))
episode_directors = [str(mov_participation.person) for mov_participation in
return_mov_participation_data(
tv_episode_rev.reviewed_tv_episode, 'Director')]
episode_cast = [str(mov_participation.person) for mov_participation in
return_mov_participation_data(
tv_episode_rev.reviewed_tv_episode, 'Actor')]
print('DIRECTORS: ' + str(episode_directors))
dir_key, director = get_director_key(episode_directors)
image = None
if tv_episode_rev.reviewed_tv_episode.poster:
image = tv_episode_rev.reviewed_tv_episode.poster.url
else:
image = tv_episode_rev.reviewed_tv_episode.tv_season.tv_series.poster.\
url
country_of_origin = [
str(country) for country in
tv_episode_rev.reviewed_tv_episode.tv_season.country_of_origin.all()]
if country_of_origin and len(country_of_origin) == 1:
country_of_origin = country_of_origin[0]
if not (episode_cast and director and image and country_of_origin):
return None
if tv_episode_rev.grade:
grade = float(tv_episode_rev.grade.grade_numerical)
elif tv_episode_rev.tvepisodesegmentreview_set.all().exists():
grades = [float(tv_ep_rev_seg.grade.grade_numerical)
for tv_ep_rev_seg in
tv_episode_rev.tvepisodesegmentreview_set.all()]
        grade = round(sum(grades) / len(grades) * 2.0) / 2.0  # average, rounded to the nearest half point
structured_data = {
CONTEXT_KEY: CONTEXT,
TYPE_KEY: REVIEW_TYPE,
'author': {TYPE_KEY: PERSON_TYPE,
'name': review_author,
'sameAs': CONTRIBUTORS_URL},
'url': db_object_absolute_url,
'datePublished': tv_episode_rev.first_created.isoformat(),
'publisher': {TYPE_KEY: ORGANISATION_TYPE,
'name': ORGANIZATION_NAME,
'sameAs': ORGANIZATION_HOME_URL},
'description': tv_episode_rev.mov_review_page_description,
'inLanguage': 'en',
'itemReviewed': {TYPE_KEY: TV_SERIES_TYPE,
'name': str(tv_episode_rev.reviewed_tv_episode.
tv_season.tv_series.main_title),
'sameAs': tv_episode_rev.reviewed_tv_episode.
tv_season.tv_series.imdb_link,
'containsSeason': {
'datePublished': tv_episode_rev.
reviewed_tv_episode.tv_season.year_of_release,
'episode': {
TYPE_KEY: TV_EPISODE_TYPE,
'episodeNumber': str(
tv_episode_rev.reviewed_tv_episode.
episode_number),
'name': tv_episode_rev.reviewed_tv_episode.
episode_title,
'sameAs': tv_episode_rev.reviewed_tv_episode.
imdb_link,
'image': str(image),
dir_key: director,
'actors': episode_cast,
'countryOfOrigin': country_of_origin
},
'name': tv_episode_rev.reviewed_tv_episode.
tv_season.season_title
}
},
'reviewRating': {TYPE_KEY: 'Rating',
'worstRating': 0.5,
'bestRating': 4.0,
'ratingValue': grade},
'reviewBody': tv_episode_rev.review_snippet
}
return structured_data
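# --- Illustrative sketch (not part of the original module): the dictionaries
# returned above are meant to be emitted as JSON-LD. A hypothetical helper for
# serializing them into a <script> tag (e.g. from a template context) could be:
import json

def render_json_ld(structured_data):
    # The builders above return None when required credits are missing.
    if structured_data is None:
        return ""
    return '<script type="application/ld+json">%s</script>' % json.dumps(structured_data)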
| 39.900943
| 79
| 0.593924
|
fd83724a0df2b11853b181ae17f1baeac319895d
| 1,000
|
py
|
Python
|
blog/models.py
|
burhankapadia18/savagecoder
|
bbb5150473abcd1dd9a3557fdfcb2ef0d7ac7f41
|
[
"MIT"
] | 1
|
2021-07-27T07:32:42.000Z
|
2021-07-27T07:32:42.000Z
|
blog/models.py
|
burhankapadia18/savagecoder
|
bbb5150473abcd1dd9a3557fdfcb2ef0d7ac7f41
|
[
"MIT"
] | null | null | null |
blog/models.py
|
burhankapadia18/savagecoder
|
bbb5150473abcd1dd9a3557fdfcb2ef0d7ac7f41
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
from django.utils.timezone import now
# Create your models here.
class Post(models.Model):
sno = models.AutoField(primary_key=True)
title = models.CharField(max_length=225)
content = models.TextField()
author = models.CharField(max_length=100)
slug = models.CharField(max_length=100)
views = models.IntegerField(default=0)
timeStamp = models.DateTimeField(blank=True)
def __str__(self):
return self.title + " by " + self.author
class BlogComment(models.Model):
sno = models.AutoField(primary_key=True)
comment = models.TextField()
user = models.ForeignKey(User, on_delete=models.CASCADE)
post = models.ForeignKey(Post, on_delete=models.CASCADE)
parent = models.ForeignKey('self', on_delete=models.CASCADE, null=True)
timestamp = models.DateTimeField(default=now)
def __str__(self):
return self.comment[:13] + "... by " + self.user.username
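# --- Illustrative sketch (not part of the original models.py): creating a post and
# a comment from the Django shell, using only the fields defined above. The values
# and the `some_user` object are made up for the example.
#
#   >>> from django.utils.timezone import now
#   >>> from blog.models import Post, BlogComment
#   >>> post = Post.objects.create(title="Hello", content="First post", author="Burhan",
#   ...                            slug="hello", timeStamp=now())
#   >>> BlogComment.objects.create(comment="Nice post!", user=some_user, post=post)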
| 32.258065
| 75
| 0.715
|
def1a13b0bb44dfdc0d9765f91f1290a5154ecff
| 1,831
|
py
|
Python
|
compiler/daanpnr.py
|
PietPtr/verilog2minecraft
|
911a4e35df80c340747bbc7b53019a90cbac9e3b
|
[
"MIT"
] | null | null | null |
compiler/daanpnr.py
|
PietPtr/verilog2minecraft
|
911a4e35df80c340747bbc7b53019a90cbac9e3b
|
[
"MIT"
] | null | null | null |
compiler/daanpnr.py
|
PietPtr/verilog2minecraft
|
911a4e35df80c340747bbc7b53019a90cbac9e3b
|
[
"MIT"
] | null | null | null |
import random
import numpy as np
from compiler.cell_defs import minecraft_cell_lib
from compiler.graph import collides_with_any
def place_and_route(unplaced):
to_place = [(None, unplaced[1])]
placed_cells = [x for x in unplaced if x.placed]
print(placed_cells)
while len(to_place) > 0:
placer, cell = to_place[0]
if cell in placed_cells:
to_place.pop(0)
continue
gv = minecraft_cell_lib[cell.celltype][0]
if placer is None:
cell.place(np.array([20, 20, 20]), gv)
else:
new_position = np.array(placer.position)
new_position[0] += placer.gate_version.size[0] + 1
while collides_with_any(new_position, gv.size, placed_cells):
# print("position collides:", new_position)
new_position[0] += random.randint(-2, 2)
new_position[1] = min(200, max(5, new_position[1] + random.randint(-2, 2)))
new_position[2] += random.randint(-2, 2)
for i in range(3):
new_position[i] = max(20, new_position[i])
cell.place(new_position, gv)
for inputs in cell.inputs.values():
for input, _ in inputs:
if input not in placed_cells and input not in to_place:
to_place.append((cell, input))
for outputs in cell.outputs.values():
for output, _ in outputs:
if output not in placed_cells and output not in to_place:
to_place.append((cell, output))
placed_cells.append(cell)
to_place.pop(0)
print("placed: ", cell, "total: ", len(placed_cells), "out of", len(unplaced))
print("Finished")
print(unplaced)
return unplaced
def spring_graph(graph):
springs = []
| 32.122807
| 91
| 0.586565
|
5c204ab769e4dcb6393793ff1c1fc060fee38ec5
| 3,576
|
py
|
Python
|
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
|
tzhanl/azure-sdk-for-python
|
18cd03f4ab8fd76cc0498f03e80fbc99f217c96e
|
[
"MIT"
] | 1
|
2021-06-02T08:01:35.000Z
|
2021-06-02T08:01:35.000Z
|
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
|
tzhanl/azure-sdk-for-python
|
18cd03f4ab8fd76cc0498f03e80fbc99f217c96e
|
[
"MIT"
] | 1
|
2020-03-06T05:57:16.000Z
|
2020-03-06T05:57:16.000Z
|
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
|
tzhanl/azure-sdk-for-python
|
18cd03f4ab8fd76cc0498f03e80fbc99f217c96e
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .copy_sink import CopySink
class AzureDataExplorerSink(CopySink):
"""A copy activity Azure Data Explorer sink.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param write_batch_size: Write batch size. Type: integer (or Expression
with resultType integer), minimum: 0.
:type write_batch_size: object
:param write_batch_timeout: Write batch timeout. Type: string (or
Expression with resultType string), pattern:
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type write_batch_timeout: object
:param sink_retry_count: Sink retry count. Type: integer (or Expression
with resultType integer).
:type sink_retry_count: object
:param sink_retry_wait: Sink retry wait. Type: string (or Expression with
resultType string), pattern:
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type sink_retry_wait: object
:param max_concurrent_connections: The maximum concurrent connection count
for the sink data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param type: Required. Constant filled by server.
:type type: str
:param ingestion_mapping_name: A name of a pre-created csv mapping that
was defined on the target Kusto table. Type: string.
:type ingestion_mapping_name: object
:param ingestion_mapping_as_json: An explicit column mapping description
provided in a json format. Type: string.
:type ingestion_mapping_as_json: object
:param flush_immediately: If set to true, any aggregation will be skipped.
Default is false. Type: boolean.
:type flush_immediately: object
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
}
def __init__(self, **kwargs):
super(AzureDataExplorerSink, self).__init__(**kwargs)
self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None)
self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None)
self.flush_immediately = kwargs.get('flush_immediately', None)
self.type = 'AzureDataExplorerSink'
| 46.441558
| 92
| 0.657718
|
89862f357f5f4151aa9e6187c0b86073e91413eb
| 823
|
py
|
Python
|
src/tutorialsite/tutorialsite/urls.py
|
hawwestin/MSR.Polls_sample
|
c88a30d763f319eecf2a8a69f8d39e0056a2aaf0
|
[
"MIT"
] | null | null | null |
src/tutorialsite/tutorialsite/urls.py
|
hawwestin/MSR.Polls_sample
|
c88a30d763f319eecf2a8a69f8d39e0056a2aaf0
|
[
"MIT"
] | null | null | null |
src/tutorialsite/tutorialsite/urls.py
|
hawwestin/MSR.Polls_sample
|
c88a30d763f319eecf2a8a69f8d39e0056a2aaf0
|
[
"MIT"
] | null | null | null |
"""tutorialsite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.urls import include
urlpatterns = [
path('', include('polls.urls')),
path('admin/', admin.site.urls),
]
| 34.291667
| 77
| 0.708384
|
c9a9309ad05cfabd46494c3cb1992effbdab6530
| 9,006
|
py
|
Python
|
examples/music_drop_fade.py
|
czogran/pygame1
|
1591a3295402f914950ae15039b91136d8bf8f79
|
[
"Python-2.0",
"OLDAP-2.3"
] | 3
|
2021-06-13T00:22:21.000Z
|
2021-06-13T11:58:35.000Z
|
examples/music_drop_fade.py
|
czogran/pygame1
|
1591a3295402f914950ae15039b91136d8bf8f79
|
[
"Python-2.0",
"OLDAP-2.3"
] | 7
|
2021-06-18T00:10:09.000Z
|
2022-03-28T21:10:20.000Z
|
examples/music_drop_fade.py
|
czogran/pygame1
|
1591a3295402f914950ae15039b91136d8bf8f79
|
[
"Python-2.0",
"OLDAP-2.3"
] | 4
|
2021-03-24T07:28:37.000Z
|
2021-10-03T14:30:46.000Z
|
#!/usr/bin/env python
""" pygame.examples.music_drop_fade
Fade in and play music from a list while observing several events
Adds music files to a playlist whenever played by one of the following methods
Music files passed from the commandline are played
Music files and filenames are played when dragged and dropped onto the pygame window
Polls the clipboard and plays music files if it finds one there
Keyboard Controls:
* Press space or enter to pause music playback
* Press up or down to change the music volume
* Press left or right to seek 5 seconds into the track
* Press escape to quit
* Press any other button to skip to the next music file in the list
"""
import pygame as pg
import os, sys
VOLUME_CHANGE_AMOUNT = 0.02 # how fast should up and down arrows change the volume?
def add_file(filename):
"""
This function will check if filename exists and is a music file
    If it is, the file will be added to the list of music files (even if already there)
Type checking is by the extension of the file, not by its contents
We can only discover if the file is valid when we mixer.music.load() it later
It looks in the file directory and its data subdirectory
"""
if filename.rpartition(".")[2].lower() not in music_file_types:
print("{} not added to file list".format(filename))
print("only these files types are allowed: ", music_file_types)
return False
elif os.path.exists(filename):
music_file_list.append(filename)
elif os.path.exists(os.path.join(main_dir, filename)):
music_file_list.append(os.path.join(main_dir, filename))
elif os.path.exists(os.path.join(data_dir, filename)):
music_file_list.append(os.path.join(data_dir, filename))
else:
print("file not found")
return False
print("{} added to file list".format(filename))
return True
def play_file(filename):
"""
This function will call add_file and play it if successful
The music will fade in during the first 4 seconds
set_endevent is used to post a MUSIC_DONE event when the song finishes
The main loop will call play_next() when the MUSIC_DONE event is received
"""
global starting_pos
if add_file(filename):
try: # we must do this in case the file is not a valid audio file
pg.mixer.music.load(music_file_list[-1])
except pg.error as e:
print(e) # print description such as 'Not an Ogg Vorbis audio stream'
if filename in music_file_list:
music_file_list.remove(filename)
print("{} removed from file list".format(filename))
return
pg.mixer.music.play(fade_ms=4000)
pg.mixer.music.set_volume(volume)
if filename.rpartition(".")[2].lower() in music_can_seek:
print("file supports seeking")
starting_pos = 0
else:
print("file does not support seeking")
starting_pos = -1
pg.mixer.music.set_endevent(MUSIC_DONE)
def play_next():
"""
This function will play the next song in music_file_list
It uses pop(0) to get the next song and then appends it to the end of the list
The song will fade in during the first 4 seconds
"""
global starting_pos
if len(music_file_list) > 1:
nxt = music_file_list.pop(0)
try:
pg.mixer.music.load(nxt)
except pg.error as e:
print(e)
print("{} removed from file list".format(nxt))
music_file_list.append(nxt)
print("starting next song: ", nxt)
else:
nxt = music_file_list[0]
pg.mixer.music.play(fade_ms=4000)
pg.mixer.music.set_volume(volume)
pg.mixer.music.set_endevent(MUSIC_DONE)
if nxt.rpartition(".")[2].lower() in music_can_seek:
starting_pos = 0
else:
starting_pos = -1
def draw_text_line(text, y=0):
"""
Draws a line of text onto the display surface
    The text will be centered horizontally at the given y position
The text's height is added to y and returned to the caller
"""
screen = pg.display.get_surface()
surf = font.render(text, 1, (255, 255, 255))
y += surf.get_height()
x = (screen.get_width() - surf.get_width()) / 2
screen.blit(surf, (x, y))
return y
def change_music_postion(amount):
"""
    Changes the current playback position by amount seconds.
    This only works with OGG and MP3 files.
    music.get_pos() returns how many milliseconds the song has played, not
    the current position in the file. We must track the starting position
ourselves. music.set_pos() will set the position in seconds.
"""
global starting_pos
if starting_pos >= 0: # will be -1 unless play_file() was OGG or MP3
played_for = pg.mixer.music.get_pos() / 1000.0
old_pos = starting_pos + played_for
starting_pos = old_pos + amount
pg.mixer.music.play(start=starting_pos)
print("jumped from {} to {}".format(old_pos, starting_pos))
MUSIC_DONE = pg.event.custom_type() # event to be set as mixer.music.set_endevent()
main_dir = os.path.split(os.path.abspath(__file__))[0]
data_dir = os.path.join(main_dir, "data")
starting_pos = 0 # needed to fast forward and rewind
volume = 0.75
music_file_list = []
music_file_types = ("mp3", "ogg", "mid", "mod", "it", "xm", "wav")
music_can_seek = ("mp3", "ogg", "mod", "it", "xm")
def main():
global font # this will be used by the draw_text_line function
global volume, starting_pos
running = True
paused = False
# we will be polling for key up and key down events
# users should be able to change the volume by holding the up and down arrows
# the change_volume variable will be set by key down events and cleared by key up events
change_volume = 0
pg.init()
pg.display.set_mode((640, 480))
font = pg.font.SysFont("Arial", 24)
clock = pg.time.Clock()
pg.scrap.init()
pg.SCRAP_TEXT = pg.scrap.get_types()[0] # TODO remove when scrap module is fixed
clipped = pg.scrap.get(pg.SCRAP_TEXT).decode(
"UTF-8") # store the current text from the clipboard TODO remove decode
# add the command line arguments to the music_file_list
for arg in sys.argv[1:]:
add_file(arg)
play_file("house_lo.ogg") # play default music included with pygame
# draw instructions on screen
y = draw_text_line("Drop music files or path names onto this window", 20)
y = draw_text_line("Copy file names into the clipboard", y)
y = draw_text_line("Or feed them from the command line", y)
y = draw_text_line("If it's music it will play!", y)
y = draw_text_line("SPACE to pause or UP/DOWN to change volume", y)
y = draw_text_line("LEFT and RIGHT will skip around the track", y)
draw_text_line("Other keys will start the next track", y)
"""
This is the main loop
It will respond to drag and drop, clipboard changes, and key presses
"""
while running:
for ev in pg.event.get():
if ev.type == pg.QUIT:
running = False
elif ev.type == pg.DROPTEXT:
play_file(ev.text)
elif ev.type == pg.DROPFILE:
play_file(ev.file)
elif ev.type == MUSIC_DONE:
play_next()
elif ev.type == pg.KEYDOWN:
if ev.key == pg.K_ESCAPE:
running = False # exit loop
elif ev.key in (pg.K_SPACE, pg.K_RETURN):
if paused:
pg.mixer.music.unpause()
paused = False
else:
pg.mixer.music.pause()
paused = True
elif ev.key == pg.K_UP:
change_volume = VOLUME_CHANGE_AMOUNT
elif ev.key == pg.K_DOWN:
change_volume = -VOLUME_CHANGE_AMOUNT
elif ev.key == pg.K_RIGHT:
change_music_postion(+5)
elif ev.key == pg.K_LEFT:
change_music_postion(-5)
else:
play_next()
elif ev.type == pg.KEYUP:
if ev.key in (pg.K_UP, pg.K_DOWN):
change_volume = 0
# is the user holding up or down?
if change_volume:
volume += change_volume
volume = min(max(0, volume), 1) # volume should be between 0 and 1
pg.mixer.music.set_volume(volume)
print("volume:", volume)
# TODO remove decode when SDL2 scrap is fixed
new_text = pg.scrap.get(pg.SCRAP_TEXT).decode("UTF-8")
if new_text != clipped: # has the clipboard changed?
clipped = new_text
play_file(clipped) # try to play the file if it has
pg.display.flip()
clock.tick(9) # keep CPU use down by updating screen less often
pg.quit()
if __name__ == "__main__":
main()
| 36.024
| 92
| 0.63058
|
ae14fd394a7b0d1f4196e9cc1de22c95b7e61737
| 36,932
|
py
|
Python
|
v3.x/aerialvision/startup.py
|
csl-iisc/dws
|
46d6c60d2b832525a8edf7b117b4d392c6a2f967
|
[
"Unlicense"
] | null | null | null |
v3.x/aerialvision/startup.py
|
csl-iisc/dws
|
46d6c60d2b832525a8edf7b117b4d392c6a2f967
|
[
"Unlicense"
] | null | null | null |
v3.x/aerialvision/startup.py
|
csl-iisc/dws
|
46d6c60d2b832525a8edf7b117b4d392c6a2f967
|
[
"Unlicense"
] | 4
|
2021-04-24T00:08:23.000Z
|
2021-05-13T06:30:21.000Z
|
#!/usr/bin/env python
# Copyright (C) 2009 by Aaron Ariel, Tor M. Aamodt, Andrew Turner, Wilson W. L.
# Fung, Zev Weiss and the University of British Columbia, Vancouver,
# BC V6T 1Z4, All Rights Reserved.
#
# THIS IS A LEGAL DOCUMENT BY DOWNLOADING GPGPU-SIM, YOU ARE AGREEING TO THESE
# TERMS AND CONDITIONS.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNERS OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# NOTE: The files libcuda/cuda_runtime_api.c and src/cuda-sim/cuda-math.h
# are derived from the CUDA Toolset available from http://www.nvidia.com/cuda
# (property of NVIDIA). The files benchmarks/BlackScholes/ and
# benchmarks/template/ are derived from the CUDA SDK available from
# http://www.nvidia.com/cuda (also property of NVIDIA). The files from
# src/intersim/ are derived from Booksim (a simulator provided with the
# textbook "Principles and Practices of Interconnection Networks" available
# from http://cva.stanford.edu/books/ppin/). As such, those files are bound by
# the corresponding legal terms and conditions set forth separately (original
# copyright notices are left in files from these sources and where we have
# modified a file our copyright notice appears before the original copyright
# notice).
#
# Using this version of GPGPU-Sim requires a complete installation of CUDA
# which is distributed seperately by NVIDIA under separate terms and
# conditions. To use this version of GPGPU-Sim with OpenCL requires a
# recent version of NVIDIA's drivers which support OpenCL.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the University of British Columbia nor the names of
# its contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# 4. This version of GPGPU-SIM is distributed freely for non-commercial use only.
#
# 5. No nonprofit user may place any restrictions on the use of this software,
# including as modified by the user, by any other authorized user.
#
# 6. GPGPU-SIM was developed primarily by Tor M. Aamodt, Wilson W. L. Fung,
# Ali Bakhoda, George L. Yuan, at the University of British Columbia,
# Vancouver, BC V6T 1Z4
import sys
import Tkinter as Tk
import Pmw
import lexyacc
import guiclasses
import tkFileDialog as Fd
import organizedata
import os
import os.path
global TabsForGraphs
global Filenames
global TabsForText
global SourceCode
Filenames = []
TabsForGraphs = []
vars = {}
TabsForText = []
userSettingPath = os.path.join(os.environ['HOME'], '.gpgpu_sim', 'aerialvision')
def checkEmpty(list):
bool = 0
try:
if type(list[0]).__name__ == 'list':
for iter in list:
for x in iter:
if ((int(x) != 0) and (x != 'NULL')):
bool = 1
return bool
else:
for x in list:
if ((x != 'NULL') and (int(x) != 0)):
bool = 1
return bool
except:
for x in list:
if ((int(x) != 0) and (x != 'NULL')):
bool = 1
return bool
def fileInput(cl_files=None):
# The Main Window Stuff
# Instantiate the window
instance = Tk.Tk();
instance.title("File Input")
#set the window size
root = Tk.Frame(instance, width = 1100, height = 550, bg = 'white');
root.pack_propagate(0);
root.pack();
#Title at top of Page
rootTitle = Tk.Label(root, text='AerialVision 1.1', font = ("Gill Sans MT", 20, "bold"), bg = 'white');
rootTitle.pack(side = Tk.TOP);
fileInputTitle = Tk.Label(root, text='Please Fill Out Specifications \n to Get Started', font = ("Gill Sans MT", 15, "bold", "underline"), bg = 'white', width = 400)
fileInputTitle.pack(side = Tk.TOP)
inputTabs = Pmw.NoteBook(root)
inputTabs.pack(fill = 'both', expand = 'True')
fileInputOuter = inputTabs.add('File Inputs for Time Lapse View')
fileInputTextEditor = inputTabs.add('File Inputs for Source Code View')
#################### The time lapse view side #############################3
fileInput = Tk.Frame(fileInputOuter, bg = 'white', borderwidth = 5, relief = Tk.GROOVE)
fileInput.pack()
specChoices = Tk.Frame(fileInput, bg= 'white')
specChoices.pack(side = Tk.TOP, anchor = Tk.W, pady = 30)
addFile = Tk.Frame(specChoices, bg = 'white')
addFile.pack(side = Tk.TOP, anchor = Tk.W)
lAddFile = Tk.Label(addFile, text= "Add Input File: ", bg= 'white')
lAddFile.pack(side = Tk.LEFT)
eAddFile = Tk.Entry(addFile, width= 30, bg = 'white')
eAddFile.pack(side = Tk.LEFT)
bClearEntry = Tk.Button(addFile, text = "Clear", command = (lambda: clearField(eAddFile)))
bClearEntry.pack(side = Tk.LEFT)
bAddFileSubmit = Tk.Button(addFile, text = "Add File", command = (lambda: addToListbox(cFilesAdded, eAddFile.get(),eAddFile)))
bAddFileSubmit.pack(side = Tk.LEFT)
#Loading the most recent directory visited as the first directory
try:
loadfile = open(os.path.join(userSettingPath, 'recentfiles.txt'), 'r')
tmprecentfile = loadfile.readlines()
recentfile = ''
tmprecentfile = tmprecentfile[len(tmprecentfile) -1]
tmprecentfile = tmprecentfile.split('/')
for iter in range(1,len(tmprecentfile) - 1):
recentfile = recentfile + '/' + tmprecentfile[iter]
except IOError,e:
if e.errno == 2:
# recentfiles.txt does not exist, ignore and use CWD
recentfile = '.'
else:
raise e
bAddFileBrowse = Tk.Button(addFile, text = "Browse", command = (lambda: eAddFile.insert(0,Fd.askopenfilename(initialdir=recentfile ))))
bAddFileBrowse.pack(side = Tk.LEFT)
bAddFileRecentFiles = Tk.Button(addFile, text = "Recent Files", command = (lambda: loadRecentFile(eAddFile)))
bAddFileRecentFiles.pack(side = Tk.LEFT)
filesAdded = Tk.Frame(specChoices, bg = 'white')
filesAdded.pack(side = Tk.TOP, anchor = Tk.W)
lFilesAdded = Tk.Label(filesAdded, text = "Files Added: ", bg = 'white')
lFilesAdded.pack(side = Tk.LEFT)
cFilesAdded = Tk.Listbox(filesAdded, width = 100, height = 5, bg = 'white')
cFilesAdded.pack(side = Tk.LEFT)
bFilesAddedRem = Tk.Button(filesAdded, text = "Remove", command = (lambda: delFileFromListbox(cFilesAdded)))
bFilesAddedRem.pack(side = Tk.LEFT)
screenRes = Tk.Frame(specChoices, bg = 'white')
screenRes.pack(side = Tk.TOP, anchor= Tk.W)
lScreenResolution = Tk.Label(screenRes, text = "Choose Closest Screen Resolution: ", bg = 'white')
lScreenResolution.pack(side = Tk.LEFT)
Modes = [("1024 x 768", "1"), ("1600 x 1200", "3") ]
num = Tk.StringVar()
num.set("3")
for text, mode in Modes:
bRes = Tk.Radiobutton(specChoices, text = text, variable = num, value = mode, bg = 'white')
bRes.pack(anchor = Tk.W)
# add files specified on command line to listbox
if cl_files:
#print "adding", cl_files
addListToListbox(cFilesAdded,cl_files)
recentfile = cl_files[len(cl_files) -1]
# check box to skip parsing of CFLog
skipCFLogVar = Tk.IntVar()
skipCFLogVar.set(1)
cbSkipCFLog = Tk.Checkbutton(specChoices, text = "Skip CFLog parsing", bg = 'white', variable = skipCFLogVar)
cbSkipCFLog.pack(side = Tk.LEFT)
# check box to activate converting CFLog to CUDA source line
cflog2cuda = Tk.IntVar()
cflog2cuda.set(0)
cbCFLog2CUDAsrc = Tk.Checkbutton(specChoices, text = "Convert CFLog to CUDA source line", bg = 'white', variable = cflog2cuda)
cbCFLog2CUDAsrc.pack(side = Tk.LEFT)
############### The source code view side ##################################
fileInputTE = Tk.Frame(fileInputTextEditor, bg = 'white', borderwidth = 5, relief = Tk.GROOVE)
fileInputTE.pack()
###### INPUT a Text File ###############
specChoicesTE = Tk.Frame(fileInputTE, bg= 'white')
specChoicesTE.pack(side = Tk.TOP, anchor = Tk.W, pady = 30)
addFileTE = Tk.Frame(specChoicesTE, bg = 'white')
addFileTE.pack(side = Tk.TOP, anchor = Tk.W)
lAddFileTE = Tk.Label(addFileTE, text= "Add CUDA Source Code File: ", bg= 'white')
lAddFileTE.pack(side = Tk.LEFT)
eAddFileTE = Tk.Entry(addFileTE, width= 30, bg = 'white')
eAddFileTE.pack(side = Tk.LEFT)
bClearEntryTE = Tk.Button(addFileTE, text = "Clear", command = (lambda: clearField(eAddFileTE)))
bClearEntryTE.pack(side = Tk.LEFT)
bAddFileBrowseTE = Tk.Button(addFileTE, text = "Browse", command = (lambda: eAddFileTE.insert(0,Fd.askopenfilename(initialdir=recentfile ))))
bAddFileBrowseTE.pack(side = Tk.LEFT)
bAddFileRecentFilesTE = Tk.Button(addFileTE, text = "Recent Files", command = (lambda: loadRecentFile(eAddFileTE)))
bAddFileRecentFilesTE.pack(side = Tk.LEFT)
### Input Corresponding PTX file ###########
addFileTEPTX = Tk.Frame(specChoicesTE, bg = 'white')
addFileTEPTX.pack(side = Tk.TOP, anchor = Tk.W)
lAddFileTEPTX = Tk.Label(addFileTEPTX, text= "Add Corresponding PTX File: ", bg= 'white')
lAddFileTEPTX.pack(side = Tk.LEFT)
eAddFileTEPTX = Tk.Entry(addFileTEPTX, width= 30, bg = 'white')
eAddFileTEPTX.pack(side = Tk.LEFT)
bClearEntryTEPTX = Tk.Button(addFileTEPTX, text = "Clear", command = (lambda: clearField(eAddFileTEPTX)))
bClearEntryTEPTX.pack(side = Tk.LEFT)
bAddFileBrowseTEPTX = Tk.Button(addFileTEPTX, text = "Browse", command = (lambda: eAddFileTEPTX.insert(0,Fd.askopenfilename(initialdir=recentfile )))) #"/home/taamodt/fpga_simulation/run/"
bAddFileBrowseTEPTX.pack(side = Tk.LEFT)
bAddFileRecentFilesTEPTX = Tk.Button(addFileTEPTX, text = "Recent Files", command = (lambda: loadRecentFile(eAddFileTEPTX)))
bAddFileRecentFilesTEPTX.pack(side = Tk.LEFT)
lNote = Tk.Label(addFileTEPTX, text = '*Must include at least PTX and Stat files before pressing the submit button', bg = 'white')
lNote.pack(side = Tk.LEFT)
#### Input the corresponding stat file ##################
addFileTEStat = Tk.Frame(specChoicesTE, bg = 'white')
addFileTEStat.pack(side = Tk.TOP, anchor = Tk.W)
lAddFileTEStat = Tk.Label(addFileTEStat, text= "Add Corresponding Stat File: ", bg= 'white')
lAddFileTEStat.pack(side = Tk.LEFT)
eAddFileTEStat = Tk.Entry(addFileTEStat, width= 30, bg = 'white')
eAddFileTEStat.pack(side = Tk.LEFT)
bClearEntryTEStat = Tk.Button(addFileTEStat, text = "Clear", command = (lambda: clearField(eAddFileTEStat)))
bClearEntryTEStat.pack(side = Tk.LEFT)
bAddFileBrowseTEStat = Tk.Button(addFileTEStat, text = "Browse", command = (lambda: eAddFileTEStat.insert(0,Fd.askopenfilename(initialdir=recentfile )))) #"/home/taamodt/fpga_simulation/run/"
bAddFileBrowseTEStat.pack(side = Tk.LEFT)
bAddFileRecentFilesTEStat = Tk.Button(addFileTEStat, text = "Recent Files", command = (lambda: loadRecentFile(eAddFileTEStat)))
bAddFileRecentFilesTEStat.pack(side = Tk.LEFT)
bAddFileSubmitTEStat = Tk.Button(addFileTEStat, text = "Add Files", command = lambda: addToListboxTE([cFilesAddedTE,cFilesAddedTEPTX, cFilesAddedTEStat],
[eAddFileTE,eAddFileTEPTX, eAddFileTEStat]), bg = 'green')
bAddFileSubmitTEStat.pack(side = Tk.LEFT)
#### Display text file Chosen and stat file Chosen ###########
#TEXT FILES CHOSEN
filesAddedTE = Tk.Frame(specChoicesTE, bg = 'white')
filesAddedTE.pack(side = Tk.TOP, anchor = Tk.W)
lFilesAddedTE = Tk.Label(filesAddedTE, text = "CUDA Source Code File Added: ", bg = 'white')
lFilesAddedTE.pack(side = Tk.LEFT)
cFilesAddedTE = Tk.Listbox(filesAddedTE, width = 100, height = 3, bg = 'white')
cFilesAddedTE.pack(side = Tk.LEFT)
#Corresponding PTX File Chosen
filesAddedTEPTX = Tk.Frame(specChoicesTE, bg = 'white')
filesAddedTEPTX.pack(side = Tk.TOP, anchor = Tk.W)
lFilesAddedTEPTX = Tk.Label(filesAddedTEPTX, text = "Corresponding PTX Files Added: ", bg = 'white')
lFilesAddedTEPTX.pack(side = Tk.LEFT)
cFilesAddedTEPTX = Tk.Listbox(filesAddedTEPTX, width = 100, height = 3, bg = 'white')
cFilesAddedTEPTX.pack(side = Tk.LEFT, padx = 15)
bFilesAddedRemTE = Tk.Button(filesAddedTE, text = "Remove", command = (lambda: delFileFromListbox(cFilesAdded)))
bFilesAddedRemTE.pack(side = Tk.LEFT)
#CORRESPONDING STAT FILES CHOSEN
filesAddedTEStat = Tk.Frame(specChoicesTE, bg = 'white')
filesAddedTEStat.pack(side = Tk.TOP, anchor = Tk.W)
lFilesAddedTEStat = Tk.Label(filesAddedTEStat, text = "Corresponding Stat Files Added: ", bg = 'white')
lFilesAddedTEStat.pack(side = Tk.LEFT)
cFilesAddedTEStat = Tk.Listbox(filesAddedTEStat, width = 100, height = 3, bg = 'white')
cFilesAddedTEStat.pack(side = Tk.LEFT, padx = 15)
bSUBMIT = Tk.Button(root, text = "Submit", font = ("Gill Sans MT", 12, "bold"), width = 10, command = lambda: submitClicked(instance, num.get(), skipCFLogVar.get(), cflog2cuda.get(), [cFilesAddedTE, cFilesAddedTEPTX, cFilesAddedTEStat]))
bSUBMIT.pack(pady = 5)
instance.mainloop()
def loadRecentFile(entry):
instance = Tk.Toplevel(bg = 'white')
instance.title("Recent Files")
try:
loadfile = open(os.path.join(userSettingPath, 'recentfiles.txt'), 'r')
recentfiles = loadfile.readlines()
except IOError,e:
if e.errno == 2:
recentfiles = ''
else:
raise e
recentFileWindow = Tk.Frame(instance, bg = 'white')
recentFileWindow.pack(side = Tk.TOP)
scrollbar = Tk.Scrollbar(recentFileWindow, orient = Tk.VERTICAL)
cRecentFile = Tk.Listbox(recentFileWindow, width = 100, height = 15, yscrollcommand = scrollbar.set)
cRecentFile.bind("<Double-Button-1>", lambda(event): recentFileInsert(entry, cRecentFile.get('active'), instance))
cRecentFile.pack(side = Tk.LEFT)
scrollbar.config(command = cRecentFile.yview)
scrollbar.pack(side = Tk.LEFT, fill = Tk.Y)
tmp = []
for x in range(len(recentfiles) - 1, 0, -1):
try:
tmp.index(recentfiles[x][0:-1])
pass
except:
tmp.append(recentfiles[x][0:-1])
for x in range(0,len(tmp)):
cRecentFile.insert(Tk.END, tmp[x])
belowRecentFileWindow = Tk.Frame(instance, bg = 'white')
belowRecentFileWindow.pack(side = Tk.BOTTOM)
bRecentFile = Tk.Button(belowRecentFileWindow , text = "Submit", command = lambda: recentFileInsert(entry, cRecentFile.get('active'), instance))
bRecentFile.pack()
bRecentFileCancel = Tk.Button(belowRecentFileWindow , text = 'Cancel', command = (lambda: instance.destroy()))
bRecentFileCancel.pack()
def recentFileInsert(entry, string, window):
window.destroy()
entry.insert(0, string)
def clearField(entry):
entry.delete(0,Tk.END)
def delFileFromListbox(filesListbox):
for files in Filenames:
if files[-80:] == filesListbox.get('active')[-80:]:
Filenames.remove(files)
filesListbox.delete(Tk.ANCHOR)
def addToListboxTE(listbox, entry):
for iter in range(1,len(listbox)):
try:
test = open(entry[iter].get(), 'r')
except:
errorMsg('Could not open file ' + entry[iter].get())
return
for iter in range(0,len(listbox)):
listbox[iter].insert(Tk.END, entry[iter].get())
entry[iter].delete(0,Tk.END)
def addToListbox(listbox, string, entry):
try:
test = open(string, 'r')
Filenames.append(string)
listbox.insert(Tk.END, string)
entry.delete(0,Tk.END)
except:
errorMsg('Could not open file')
return 0
def addListToListbox(listbox,list):
for file in list:
try:
string = os.path.abspath(file)
if os.path.isfile(string):
Filenames.append(string)
listbox.insert(Tk.END, string)
else:
print 'Could not open file: ' + string
except:
print 'Could not open file: ' + file
def errorMsg(string):
error = Tk.Toplevel(bg = 'white')
error.title("Error Message")
tError = Tk.Label(error, text = "Error", font = ("Gills Sans MT", 20, "underline", "bold"), bg = "red")
tError.pack(side = Tk.TOP, pady = 20)
lError = Tk.Label(error, text = string, font = ("Gills Sans MT", 15, "bold"), bg = 'white')
lError.pack(pady = 10, padx = 10)
bError = Tk.Button(error, text = "OK", font = ("Times New Roman", 14), command = (lambda: error.destroy()))
bError.pack(pady = 10)
def submitClicked(instance, num, skipcflog, cflog2cuda, listboxes):
for iter in range(0, len(listboxes)):
if iter == 0:
TEFiles = listboxes[iter].get(0, Tk.END)
if iter == 1:
TEPTXFiles = listboxes[iter].get(0, Tk.END)
else:
TEStatFiles = listboxes[iter].get(0, Tk.END)
organizedata.skipCFLog = skipcflog
lexyacc.skipCFLOGParsing = skipcflog
organizedata.convertCFLog2CUDAsrc = cflog2cuda
start = 0
if (not os.path.exists(userSettingPath)):
os.makedirs(userSettingPath)
f_recentFiles = open(os.path.join(userSettingPath, 'recentfiles.txt'), 'a')
for files in Filenames:
f_recentFiles.write(files + '\n')
for files in TEFiles:
f_recentFiles.write(files + '\n')
for files in TEPTXFiles:
f_recentFiles.write(files + '\n')
for files in TEStatFiles:
f_recentFiles.write(files + '\n')
f_recentFiles.close()
if num == '1':
res = 'small'
elif num == '2':
res = 'medium'
else:
res = 'big'
instance.destroy()
startup(res, [TEFiles, TEPTXFiles, TEStatFiles])
def graphAddTab(vars, graphTabs,res, entry):
TabsForGraphs.append(guiclasses.formEntry(graphTabs, str(len(TabsForGraphs) + 1), vars, res, entry))
entry.delete(0, Tk.END)
entry.insert(0, 'TabTitle?')
def remTab(graphTabs):
graphTabs.delete(Pmw.SELECT)
def destroy(instance, quit):
quit.destroy()
instance.destroy()
def tmpquit(instance):
quit = Tk.Toplevel(bg = 'white')
quit.title("...")
tQuit = Tk.Label(quit, text = "Quit?", font = ("Gills Sans MT", 20, "underline", "bold"), bg = "white")
tQuit.pack(side = Tk.TOP, pady = 20)
lQuit = Tk.Label(quit, text = "Are you sure you want to quit?", font = ("Gills Sans MT", 15, "bold"), bg = 'white')
lQuit.pack(side = Tk.TOP, pady = 20, padx = 10)
bQuit = Tk.Button(quit, text = "Yes", font = ("Time New Roman", 13), command = (lambda: destroy(instance, quit)))
bQuit.pack(side = Tk.LEFT, anchor = Tk.W, pady = 5, padx = 5)
bNo = Tk.Button(quit, text = "No", font= ("Time New Roman", 13), command = (lambda: quit.destroy()))
bNo.pack(side = Tk.RIGHT, pady = 5, padx = 5)
def startup(res, TEFILES):
global vars
# The Main Window Stuff
# Instantiate the window
instance = Tk.Tk();
instance.title("AerialVision GPU Graphing Tool")
#set the window size
if res == 'small':
root = Tk.Frame(instance, width = 1325, height = 850, bg = 'white');
elif res == 'medium':
root = Tk.Frame(instance, width = 1700, height = 1100, bg = 'white');
else:
root = Tk.Frame(instance, width = 1700, height = 1100, bg = 'white');
root.pack_propagate(0);
root.pack();
# User can choose between time lapse view and source code view
chooseTextVisualizer = Pmw.NoteBook(root)
chooseTextVisualizer.pack(fill= 'both', expand = 'true')
visualizer = chooseTextVisualizer.add('Time Lapse View')
textEditor = chooseTextVisualizer.add('Source Code View')
#INITIALIZING THE TIME LAPSE VIEW
#The top frame for the control panel
# Frame for Control Panel
if res == 'small':
controlPanel = Tk.Frame(visualizer, width=1250, height= 50, bg ="beige", borderwidth = 5, relief = Tk.GROOVE);
elif res == 'medium':
controlPanel = Tk.Frame(visualizer, width=1530, height= 50, bg ="beige", borderwidth = 5, relief = Tk.GROOVE);
else:
controlPanel = Tk.Frame(visualizer, width=1530, height= 50, bg ="beige", borderwidth = 5, relief = Tk.GROOVE);
controlPanel.pack(anchor = Tk.N, pady = 5);
controlPanel.pack_propagate(0)
# Control Panel Title
controlTitle = Tk.Frame(controlPanel, bg = 'beige')
controlTitle.pack(side = Tk.LEFT)
lControlTitle = Tk.Label(controlTitle, text='Control Panel: ', font = ("Gills Sans MT", 15, "bold"), bg = "beige");
lControlTitle.pack(side = Tk.LEFT)
#Number of Tabs Frame)
numbTabs = Tk.Frame(controlPanel, bg = 'beige')
numbTabs.pack(side = Tk.LEFT)
eAddTab = Tk.Entry(numbTabs)
bRemTab = Tk.Button(numbTabs, text = "Rem Tab", command = (lambda: remTab(graphTabs)), bg = 'red')
bRemTab.pack(side=Tk.LEFT)
bAddTab = Tk.Button(numbTabs, text = "Add Tab", command = (lambda: graphAddTab(vars, graphTabs, res, eAddTab)))
bAddTab.pack(side=Tk.LEFT)
eAddTab.pack(side = Tk.LEFT)
eAddTab.insert(0, 'TabTitle?')
bManageFiles = Tk.Button(numbTabs, text = "Manage Files", command = lambda: manageFiles())
bManageFiles.pack(side = Tk.LEFT)
#Quit or Open up new Window Frame
quitNew = Tk.Frame(controlPanel, bg = 'beige')
quitNew.pack(side = Tk.RIGHT, padx = 10)
bQuit = Tk.Button(quitNew, text = "Quit", bg = 'red', command = (lambda: tmpquit(instance)))
bQuit.pack(side = Tk.LEFT)
#The bottom Frame that contains tabs,graphs,etc...
#Instantiating the Main frame
#Frame for Graphing Area
if res == 'small':
graphMainFrame = Tk.Frame(visualizer, width = 1250, height = 750, borderwidth = 5, relief = Tk.GROOVE);
elif res == 'medium':
graphMainFrame = Tk.Frame(visualizer, width = 1615, height = 969, borderwidth = 5, relief = Tk.GROOVE);
else:
graphMainFrame = Tk.Frame(visualizer, width = 1615, height = 969, borderwidth = 5, relief = Tk.GROOVE);
graphMainFrame.pack(pady = 5);
graphMainFrame.pack_propagate(0);
#Setting up the Tabs
graphTabs = Pmw.NoteBook(graphMainFrame)
graphTabs.pack(fill = 'both', expand = 'true')
#Class newTab will take "graphTabs" which is the widget on top of graphMainFrame and create a new tab
#for every instance of the class
# Here we extract the available data that can be graphed by the user
for files in Filenames:
vars[files] = lexyacc.parseMe(files)
markForDel = {}
for files in vars:
markForDel[files] = []
for variables in vars[files]:
if variables == 'CFLOG':
continue
if variables == 'EXTVARS':
continue
if checkEmpty(vars[files][variables].data) == 0:
markForDel[files].append(variables)
for files in markForDel:
for variables in markForDel[files]:
del vars[files][variables]
organizedata.setCFLOGInfoFiles(TEFILES)
for files in Filenames:
vars[files] = organizedata.organizedata(vars[files])
graphAddTab(vars, graphTabs, res, eAddTab)
# INITIALIZING THE SOURCE CODE VIEW
if res == 'small':
textControlPanel = Tk.Frame(textEditor, width = 1250, height = 50, bg = 'beige', borderwidth = 5, relief = Tk.GROOVE)
elif res == 'medium':
textControlPanel = Tk.Frame(textEditor, width = 1530, height = 50, bg = 'beige', borderwidth = 5, relief = Tk.GROOVE)
else:
textControlPanel = Tk.Frame(textEditor, width = 1530, height = 50, bg = 'beige', borderwidth = 5, relief = Tk.GROOVE)
textControlPanel.pack(anchor = Tk.N, pady = 5)
textControlPanel.pack_propagate(0)
lTextControlPanel = Tk.Label(textControlPanel, text = 'Control Panel: ', font = ("Gills Sans MT", 15, "bold"), bg = "beige")
lTextControlPanel.pack(side = Tk.LEFT)
bTextRemTab = Tk.Button(textControlPanel, text = 'Rem Tab', command = (lambda: textRemTab(textTabs)), bg = 'red')
bTextRemTab.pack(side = Tk.LEFT)
bTextAddTab = Tk.Button(textControlPanel, text = 'AddTab', command = (lambda: textAddTab(textTabs,res, TEFILES)))
bTextAddTab.pack(side= Tk.LEFT)
bTextManageFiles = Tk.Button(textControlPanel, text = 'Manage Files', command = (lambda: textManageFiles()))
bTextManageFiles.pack(side = Tk.LEFT)
#Quit or Open up new Window Frame
textquitNew = Tk.Frame(textControlPanel, bg = 'beige')
textquitNew.pack(side = Tk.RIGHT, padx = 10)
bTextQuit = Tk.Button(textquitNew, text = "Quit", bg = 'red', command = (lambda: tmpquit(instance)))
bTextQuit.pack(side = Tk.LEFT)
if res == 'small':
textMainFrame = Tk.Frame(textEditor, width = 1250, height = 750, borderwidth = 5, relief = Tk.GROOVE)
elif res == 'medium':
textMainFrame = Tk.Frame(textEditor, width = 1615, height = 969, borderwidth = 5, relief = Tk.GROOVE)
else:
textMainFrame = Tk.Frame(textEditor, width = 1615, height = 969, borderwidth = 5, relief = Tk.GROOVE)
textMainFrame.pack(pady = 5)
textMainFrame.pack_propagate(0)
textTabs = Pmw.NoteBook(textMainFrame)
textTabs.pack(fill = 'both', expand = 'true')
textAddTab(textTabs, res, TEFILES)
instance.mainloop()
def textManageFiles():
textManageFiles = Tk.Toplevel(bg = 'white')
title = Tk.Label(textManageFiles, text = 'Manage Files', font = ("Gill Sans MT", 15, "bold", "underline"), bg= 'white' )
title.pack(side = Tk.TOP)
bottomFrameMaster = Tk.Frame(textManageFiles, bg= 'white')
bottomFrameMaster.pack(side = Tk.TOP, padx = 20, pady = 20)
bottomFrame1 = Tk.Frame(bottomFrameMaster, bg= 'white')
bottomFrame1.pack(side = Tk.LEFT, padx = 20, pady = 20)
bottomFrameOption = Tk.Frame(bottomFrameMaster, bg = 'white')
bottomFrameOption.pack(side = Tk.LEFT)
ltextCurrentFiles = Tk.Label(bottomFrame1, text= 'Current Files: ', bg = 'white')
ltextCurrentFiles.pack(side = Tk.LEFT)
bottomFrame2 = Tk.Frame(textManageFiles, bg= 'white')
bottomFrame2.pack(side = Tk.TOP, anchor= Tk.W)
ctextCurrentFiles = Tk.Listbox(bottomFrame1, width = 100)
ctextCurrentFiles.pack(side = Tk.LEFT)
lSubmittedChanges= Tk.Label(bottomFrame2, text='Changes: ', bg = 'white')
lSubmittedChanges.pack(side = Tk.LEFT, padx=35, pady =15)
cSubmittedChanges = Tk.Listbox(bottomFrame2, width = 100)
cSubmittedChanges.pack(side = Tk.LEFT,pady = 15)
for files in Filenames:
ctextCurrentFiles.insert(Tk.END, files)
btextAddFile = Tk.Button(bottomFrameOption, text = 'Add File', command = lambda: textAddFile(bottomFrameMaster,cSubmittedChanges, textManageFiles))
btextAddFile.pack()
btextRemFile = Tk.Button(bottomFrameOption, text= 'Remove File')
btextRemFile.pack(side = Tk.LEFT)
def textAddFile(frame, listbox, master):
addFileFrame = Tk.Frame(frame, bg = 'white')
addFileFrame.pack(side = Tk.RIGHT,padx = 15)
topFrame = Tk.Frame(addFileFrame, bg= 'white')
topFrame.pack(side = Tk.TOP)
bottomFrame = Tk.Frame(addFileFrame, bg = 'white')
bottomFrame.pack(side = Tk.TOP)
lSourceCode = Tk.Label(topFrame, text = 'Source Code File', bg = 'white')
lSourceCode.pack(side = Tk.LEFT)
eSourceCode = Tk.Entry(topFrame)
eSourceCode.pack(side = Tk.LEFT)
bSourceCodeClearEntry = Tk.Button(topFrame, text = "Clear", command = lambda: (eSourceCode.delete(0, Tk.END)))
bSourceCodeClearEntry.pack(side = Tk.LEFT)
bSourceCodeAddFileBrowse = Tk.Button(topFrame, text = "Browse", command = (lambda: eSourceCode.insert(0,Fd.askopenfilename())))
bSourceCodeAddFileBrowse.pack(side = Tk.LEFT)
bSourceCodeAddFileRecentFiles = Tk.Button(topFrame, text = "Recent Files", command = (lambda: loadRecentFile(eSourceCode)))
bSourceCodeAddFileRecentFiles.pack(side = Tk.LEFT)
lStatFile = Tk.Label(bottomFrame, text= 'Corresponding Stat File', bg = 'white')
lStatFile.pack(side = Tk.LEFT)
eStatFile = Tk.Entry(bottomFrame)
eStatFile.pack(side = Tk.LEFT)
bSourceCodeClearEntry = Tk.Button(bottomFrame, text = "Clear", command = lambda: (eStatFile.delete(0, Tk.END)))
bSourceCodeClearEntry.pack(side = Tk.LEFT)
bSourceCodeAddFileBrowse = Tk.Button(bottomFrame, text = "Browse", command = (lambda: eStatFile.insert(0,Fd.askopenfilename())))
bSourceCodeAddFileBrowse.pack(side = Tk.LEFT)
bSourceCodeAddFileRecentFiles = Tk.Button(bottomFrame, text = "Recent Files", command = (lambda: loadRecentFile(eStatFile)))
bSourceCodeAddFileRecentFiles.pack(side = Tk.LEFT)
bSubmit = Tk.Button(addFileFrame, text = "Submit", command = lambda: sourceCodeAddFileSubmit(eSourceCode, eStatFile, listbox, master))
bSubmit.pack(side = Tk.BOTTOM)
def sourceCodeAddFileSubmit(eSourceCode, eStatFile, listbox, frame):
source = open(eSourceCode.get(), 'r')
stat = open(eStatFile.get(), 'r')
SourceCode[eSourceCode.get()] = [source, stat]
frame.destroy()
def textAddTab(textTabs,res, TEFILES):
TabsForText.append(guiclasses.newTextTab(textTabs, str(len(TabsForText) + 1), res, TEFILES))
def textRemTab(textTabs):
textTabs.delete(Pmw.SELECT)
def manageFiles():
manageFilesWindow = Tk.Toplevel(bg = 'white')
manageFilesWindow.title("Manage Files")
titleFrame = Tk.Frame(manageFilesWindow, bg = 'white')
titleFrame.pack(side = Tk.TOP)
lTitle = Tk.Label(titleFrame, bg = 'white', text = "Manage Files" ,font = ("Gill Sans MT", 15, "bold", "underline"))
lTitle.pack(side = Tk.LEFT)
bHelp = Tk.Button(titleFrame, text = " ? ")
bHelp.pack(side = Tk.LEFT, padx = 10)
optionsFrame = Tk.Frame(manageFilesWindow, bg = 'white')
optionsFrame.pack(side = Tk.TOP, padx = 20, pady = 20)
lCurrentFiles = Tk.Label(optionsFrame, text = "Current Files: ", bg= 'white')
lCurrentFiles.pack(side = Tk.LEFT)
cCurrentFiles = Tk.Listbox(optionsFrame, width = 100)
cCurrentFiles.pack(side = Tk.LEFT)
for files in Filenames:
cCurrentFiles.insert(Tk.END, files)
buttonsFrame = Tk.Frame(optionsFrame, bg = 'white')
buttonsFrame.pack(side = Tk.LEFT, padx = 20)
bAdd = Tk.Button(buttonsFrame, text = "Add", width = 10, command = lambda: manageFilesAddFile(optionsFrame, cSubmittedChanges))
bAdd.pack(side = Tk.TOP)
bRemove = Tk.Button(buttonsFrame, text = "Remove", width = 10, command = (lambda: manageFilesDelFile(cCurrentFiles, cSubmittedChanges)))
bRemove.pack(side = Tk.TOP)
bRefresh = Tk.Button(buttonsFrame, text = "Refresh", width = 10, command = (lambda: manageFilesRefreshFile(cCurrentFiles, cSubmittedChanges)))
bRefresh.pack(side = Tk.TOP)
bSubmit = Tk.Button(buttonsFrame, text = "Submit Changes", width = 10, command = lambda: manageFilesSubmit(manageFilesWindow, cSubmittedChanges))
bSubmit.pack(side = Tk.TOP)
bCancel = Tk.Button(buttonsFrame, text = "Omit Changes", width = 10, command = lambda: manageFilesOmitChanges(manageFilesWindow))
bCancel.pack(side = Tk.LEFT)
submittedChangesFrame = Tk.Frame(manageFilesWindow, bg= 'white')
submittedChangesFrame.pack(side = Tk.TOP, anchor = Tk.W, pady = 10, padx = 20)
lSubmittedChanges = Tk.Label(submittedChangesFrame, text = "Changes: ", bg= 'white')
lSubmittedChanges.pack(side = Tk.LEFT)
cSubmittedChanges = Tk.Listbox(submittedChangesFrame, width = 100)
cSubmittedChanges.pack(side = Tk.LEFT)
def manageFilesOmitChanges(window):
window.destroy()
def manageFilesAddFile(frame, listbox):
addFrame = Tk.Frame(frame, bg = 'white')
addFrame.pack(side = Tk.LEFT, anchor = Tk.N)
lTitle = Tk.Label(addFrame, text = "Add a New File", bg = 'white')
lTitle.pack(side = Tk.TOP)
widgetsForAddFrame = Tk.Frame(addFrame, bg = 'white')
widgetsForAddFrame.pack(side = Tk.TOP)
eAddFile = Tk.Entry(widgetsForAddFrame, width= 30, bg = 'white')
eAddFile.pack(side = Tk.LEFT)
bClearEntry = Tk.Button(widgetsForAddFrame, text = "Clear", command = lambda: (clearField(eAddFile)))
bClearEntry.pack(side = Tk.LEFT)
bAddFileSubmit = Tk.Button(widgetsForAddFrame, text = "Submit", command = lambda: manageFilesAddFileSubmit(eAddFile, listbox))
bAddFileSubmit.pack(side = Tk.LEFT)
bAddFileBrowse = Tk.Button(widgetsForAddFrame, text = "Browse", command = (lambda: eAddFile.insert(0,Fd.askopenfilename())))
bAddFileBrowse.pack(side = Tk.LEFT)
bAddFileRecentFiles = Tk.Button(widgetsForAddFrame, text = "Recent Files", command = (lambda: loadRecentFile(eAddFile)))
bAddFileRecentFiles.pack(side = Tk.LEFT)
bCancel = Tk.Button(addFrame, text = "<--", command = lambda: addFrame.destroy())
bCancel.pack(side = Tk.TOP, anchor = Tk.W, pady = 20)
def manageFilesAddFileSubmit(entry, listbox):
try:
tmpList = listbox.get(0,Tk.END)
index = tmpList.index('Add File: ' + entry.get())
errorMsg("This request is already in the queue")
except:
listbox.insert(Tk.END, 'Add File: ' + entry.get())
entry.delete(0,Tk.END)
def manageFilesRefreshFile(filesListbox, listbox):
try:
tmpList = listbox.get(0,Tk.END)
index = tmpList.index("Refresh File: " + filesListbox.get('active'))
errorMsg("This request is already in the queue")
except:
listbox.insert(Tk.END, "Refresh File: " + filesListbox.get('active'))
def manageFilesDelFile(filesListbox, listbox):
try:
tmpList = listbox.get(0,Tk.END)
index = tmpList.index("Delete File: " + filesListbox.get('active'))
errorMsg("This request is already in the queue")
except:
listbox.insert(Tk.END, "Delete File: " + filesListbox.get('active'))
def manageFilesSubmit(window, listbox):
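# Each queued listbox entry is prefixed with 'Add File: ' (10 characters),
# 'Refresh File: ' (14 characters) or 'Delete File: ' (13 characters);
# the slices entries[10:], entries[14:] and entries[13:] below strip that
# prefix to recover the plain file path.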
global vars
submittedEntries = listbox.get(0, Tk.END)
count = 0
for entries in submittedEntries:
if entries[0:3] == 'Add':
#try:
test = open(entries[10:], 'r')
Filenames.append(entries[10:])
vars[entries[10:]] = lexyacc.parseMe(entries[10:])
markForDel = []
for variables in vars[entries[10:]]:
if (variables != 'CFLOG' and checkEmpty(vars[entries[10:]][variables].data) == 0):
markForDel.append(variables)
for variables in markForDel:
del vars[entries[10:]][variables]
vars[entries[10:]] = organizedata.organizedata(vars[entries[10:]])
#except:
# errorMsg('Could not open file' + str(count))
elif entries[0:7] == 'Refresh':
del vars[entries[14:]]
vars[entries[14:]] = lexyacc.parseMe(entries[14:])
markForDel = []
for variables in vars[entries[14:]]:
if checkEmpty(vars[entries[14:]][variables].data) == 0:
markForDel.append(variables)
for variables in markForDel:
del vars[entries[14:]][variables]
vars[entries[14:]] = organizedata.organizedata(vars[entries[14:]])
elif entries[0:6] == 'Delete':
del vars[entries[13:]]
Filenames.remove(entries[13:])
else:
errorMsg('This is a bug... please submit bug report')
window.destroy()
| 42.159817
| 241
| 0.653065
|
c57c0f483e1ba06d5114da49cbadbc07b32e61c6
| 5,077
|
py
|
Python
|
tests/nightly/model_backwards_compatibility_check/model_backwards_compat_inference.py
|
t-triobox/incubator-mxnet
|
93aa9e33fcb2f216179c691ed9461bc96e37ae70
|
[
"Apache-2.0"
] | 1
|
2021-11-09T01:40:17.000Z
|
2021-11-09T01:40:17.000Z
|
tests/nightly/model_backwards_compatibility_check/model_backwards_compat_inference.py
|
t-triobox/incubator-mxnet
|
93aa9e33fcb2f216179c691ed9461bc96e37ae70
|
[
"Apache-2.0"
] | null | null | null |
tests/nightly/model_backwards_compatibility_check/model_backwards_compat_inference.py
|
t-triobox/incubator-mxnet
|
93aa9e33fcb2f216179c691ed9461bc96e37ae70
|
[
"Apache-2.0"
] | 1
|
2018-07-19T00:43:30.000Z
|
2018-07-19T00:43:30.000Z
|
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .common import *
def test_lenet_gluon_load_params_api():
model_name = 'lenet_gluon_save_params_api'
logging.info('Performing inference for model/API %s' % model_name)
for folder in get_top_level_folders_in_bucket(s3, model_bucket_name):
logging.info('Fetching files for MXNet version : %s and model %s' % (folder, model_name))
model_files = download_model_files_from_s3(model_name, folder)
if len(model_files) == 0:
logging.warn('No training files found for %s for MXNet version : %s' % (model_name, folder))
continue
data = mx.npx.load(''.join([model_name, '-data']))
test_data = data['data']
# Load the model and perform inference
loaded_model = Net()
loaded_model.load_params(model_name + '-params')
output = loaded_model(test_data)
old_inference_results = mx.npx.load(model_name + '-inference')['inference']
assert_almost_equal(old_inference_results.asnumpy(), output.asnumpy(), rtol=rtol_default, atol=atol_default)
clean_model_files(model_files, model_name)
logging.info('=================================')
logging.info('Assertion passed for model : %s' % model_name)
def test_lenet_gluon_hybrid_imports_api():
model_name = 'lenet_gluon_hybrid_export_api'
logging.info('Performing inference for model/API %s' % model_name)
for folder in get_top_level_folders_in_bucket(s3, model_bucket_name):
logging.info('Fetching files for MXNet version : %s and model %s' % (folder, model_name))
model_files = download_model_files_from_s3(model_name, folder)
if len(model_files) == 0:
logging.warn('No training files found for %s for MXNet version : %s' % (model_name, folder))
continue
# Load the model and perform inference
data = mx.npx.load(''.join([model_name, '-data']))
test_data = data['data']
loaded_model = HybridNet()
loaded_model = gluon.SymbolBlock.imports(model_name + '-symbol.json', ['data'], model_name + '-0000.params')
output = loaded_model(test_data)
old_inference_results = mx.npx.load(model_name + '-inference')['inference']
assert_almost_equal(old_inference_results.asnumpy(), output.asnumpy(), rtol=rtol_default, atol=atol_default)
clean_model_files(model_files, model_name)
logging.info('=================================')
logging.info('Assertion passed for model : %s' % model_name)
def test_lstm_gluon_load_parameters_api():
# If this code is being run on version >= 1.2.0 only then execute it,
# since it uses save_parameters and load_parameters API
if compare_versions(str(mxnet_version), '1.2.1') < 0:
logging.warn('Found MXNet version %s and exiting because this version does not contain save_parameters'
' and load_parameters functions' % str(mxnet_version))
return
model_name = 'lstm_gluon_save_parameters_api'
logging.info('Performing inference for model/API %s' % model_name)
for folder in get_top_level_folders_in_bucket(s3, model_bucket_name):
logging.info('Fetching files for MXNet version : %s' % folder)
model_files = download_model_files_from_s3(model_name, folder)
if len(model_files) == 0:
logging.warn('No training files found for %s for MXNet version : %s' % (model_name, folder))
continue
data = mx.npx.load(''.join([model_name, '-data']))
test_data = data['data']
# Load the model and perform inference
loaded_model = SimpleLSTMModel()
loaded_model.load_parameters(model_name + '-params')
output = loaded_model(test_data)
old_inference_results = mx.npx.load(model_name + '-inference')['inference']
assert_almost_equal(old_inference_results.asnumpy(), output.asnumpy(), rtol=rtol_default, atol=atol_default)
clean_model_files(model_files, model_name)
logging.info('=================================')
logging.info('Assertion passed for model : %s' % model_name)
if __name__ == '__main__':
test_lenet_gluon_load_params_api()
test_lenet_gluon_hybrid_imports_api()
test_lstm_gluon_load_parameters_api()
| 48.352381
| 116
| 0.688005
|
bc1683cc694d62cdb9ed9b67eefcb257d3c29e6d
| 5,014
|
py
|
Python
|
python/get_cooccurrence.py
|
undertherain/nlp_cooc
|
e316740c469e4ade6ba064e6756057fee10466ed
|
[
"Apache-2.0"
] | 1
|
2017-09-20T03:04:47.000Z
|
2017-09-20T03:04:47.000Z
|
python/get_cooccurrence.py
|
undertherain/nlp_cooc
|
e316740c469e4ade6ba064e6756057fee10466ed
|
[
"Apache-2.0"
] | null | null | null |
python/get_cooccurrence.py
|
undertherain/nlp_cooc
|
e316740c469e4ade6ba064e6756057fee10466ed
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import line_profiler
from timeit import default_timer as timer
import sys
import glob
import os
import re
import fnmatch
import math
import numpy as np
from vocabulary import Vocabulary
import collections
from scipy import sparse
from scipy.sparse import * # dok_matrix
from swig.array_of_trees import ArrayOfTrees
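# The patch below forces scipy.sparse to use 64-bit index arrays, presumably
# so that very large co-occurrence matrices do not overflow the default
# 32-bit index dtype.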
def _my_get_index_dtype(*a, **kw):
return np.int64
sparse.sputils.get_index_dtype = _my_get_index_dtype
sparse.compressed.get_index_dtype = _my_get_index_dtype
sparse.csr.get_index_dtype = _my_get_index_dtype
sparse.csr_matrix.get_index_dtype = _my_get_index_dtype
sparse.bsr.get_index_dtype = _my_get_index_dtype
argv = sys.argv
if len(argv) < 3:
print("usage: input_dir output_dir")
exit()
name_dir_in = argv[1]
name_dir_out = argv[2]
vocab = Vocabulary()
vocab.read_from_precomputed(name_dir_out)
size_window = 2
d = collections.deque(maxlen=size_window)
for i in range(size_window):
d.append(-1)
#matrix=dok_matrix((vocab.cnt_words, vocab.cnt_words), dtype=np.int64)
#matrix=lil_matrix((vocab.cnt_words, vocab.cnt_words), dtype=np.int64)
#matrix=dok_matrix((vocab.cnt_words, vocab.cnt_words), dtype=np.int64)
cnt_workers = 2
m = ArrayOfTrees(vocab.cnt_words)
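# The helpers below split the id range [0, N) into cnt_workers contiguous,
# nearly equal chunks. get_interval(N, cnt_workers, id_worker) returns the
# half-open range owned by one worker and get_worker_id maps an id back to
# its owner. For example, N=10 and cnt_workers=3 gives the intervals
# [0, 4), [4, 7) and [7, 10).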
def get_start_i(N, cnt_workers, id_worker):
if N < cnt_workers:
return min(N, id_worker)
length_of_range = ((N + 1) // cnt_workers)
start = length_of_range * id_worker
if id_worker < N % cnt_workers:
start += id_worker
else:
start += N % cnt_workers
return start
def get_interval(N, cnt_workers, id_worker):
return (get_start_i(N, cnt_workers, id_worker), get_start_i(N, cnt_workers, id_worker + 1))
def get_worker_id(N, cnt_workers, v):
if N < cnt_workers:
return v
length_of_range = ((N + 1) // cnt_workers)
remainder = N % cnt_workers
if v < remainder * (length_of_range + 1):
return v // (length_of_range + 1)
else:
return (v - remainder * (length_of_range + 1)) // length_of_range + N % cnt_workers
def accumulate(id1, id2):
# decide which worker accumulates
# matrix[id1,id2]+=1
m.accumulate(id1, id2)
pass
#@profile
def process_word(word):
id_word = vocab.get_id(word)
if word in {".", "!", "?", "…"}:
if True: # options.obey_sentence_bounds
id_word = -1
for i in range(size_window):
d.append(-1)
else:
if id_word < 0:
return
#print("word : '{}'".format(word))
d.append(id_word)
for i in range(size_window - 1):
if d[-1] == -1 or d[i] == -1:
continue
# print("accing",d[-1],d[i])
# print("accing",d[i],d[-1])
accumulate(d[-1], d[i])
accumulate(d[i], d[-1])
def process_file(name):
print("processing " + name)
f = open(name, errors="replace")
for line in f:
s = line.strip().lower()
re_pattern = r"[\w\-']+|[.,!?…]"
tokens = re.findall(re_pattern, s)
for token in tokens:
process_word(token)
start = timer()
for root, dir, files in os.walk(name_dir_in, followlinks=True):
for items in fnmatch.filter(files, "*"):
process_file(os.path.join(root, items))
end = timer()
print("done reading corpus, took", end - start)
start = end
print("-----dumping data------")
m.dump_csr(name_dir_out, vocab.l_frequencies)
exit()
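# Everything below this exit() is an unreachable legacy code path: it built a
# scipy sparse matrix (see the commented-out dok/lil constructors above),
# converted it between COO and CSR, computed log2-based association scores and
# dumped the CSR arrays to disk. It is kept here only for reference.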
print("-----converting to COO------")
matrix_coo = matrix.tocoo()
end = timer()
print(" took", end - start)
start = end
# matrix_coo.sort_indices()
# print(matrix)
print("-----converting to csr------")
matrix_csr = matrix_coo.tocsr()
end = timer()
print(" took", end - start)
start = end
print("-----converting back to coo------")
matrix_coo = matrix_csr.tocoo()
end = timer()
print(" took", end - start)
start = end
# print(matrix_coo)
cnt_words_processed = vocab.l_frequencies.sum()
# print(matrix_csr)
debug = False
if debug:
f_out = open("bigrams_list", "w")
for i in zip(matrix_coo.row, matrix_coo.col):
row = i[0]
col = i[1]
freq = matrix[i]
v = math.log2((freq * cnt_words_processed) /
(vocab.l_frequencies[col] * vocab.l_frequencies[col]))
f_out.write("{}\t{}\t{}\t{:0.5f}\n".format(
vocab.get_word_by_id(row), vocab.get_word_by_id(col), freq, v))
f_out.close()
# print(matrix_csr.indices.dtype)
data_pmi = np.zeros(matrix_csr.data.shape[0], dtype=np.float32)
ind = 0
for i in zip(matrix_coo.row, matrix_coo.col):
row = i[0]
col = i[1]
freq = matrix[i]
v = math.log2((freq * cnt_words_processed) /
(vocab.l_frequencies[col] * vocab.l_frequencies[col]))
data_pmi[ind] = v
ind += 1
# f_out=open("bigrams.data.bin","wb")
# f_out.close()
# print(matrix_csr.indices.dtype)
matrix_csr.indices.tofile(os.path.join(name_dir_out, "bigrams.col_ind.bin"))
matrix_csr.indptr.tofile(os.path.join(name_dir_out, "bigrams.row_ptr.bin"))
data_pmi.tofile(os.path.join(name_dir_out, "bigrams.data.bin"))
| 27.102703
| 95
| 0.654767
|
18997b79b6ce35db54775d5c6c152e40294265e4
| 4,275
|
py
|
Python
|
src/twitter_analysis_tools/plotting.py
|
dmmolitor/twitter_analysis_tools
|
0599b6c1a5093ea2fb916d5fb05df92786ab6a61
|
[
"MIT"
] | 1
|
2020-05-03T18:02:16.000Z
|
2020-05-03T18:02:16.000Z
|
src/twitter_analysis_tools/plotting.py
|
dmmolitor/twitter_analysis_tools
|
0599b6c1a5093ea2fb916d5fb05df92786ab6a61
|
[
"MIT"
] | null | null | null |
src/twitter_analysis_tools/plotting.py
|
dmmolitor/twitter_analysis_tools
|
0599b6c1a5093ea2fb916d5fb05df92786ab6a61
|
[
"MIT"
] | 1
|
2020-05-03T18:01:22.000Z
|
2020-05-03T18:01:22.000Z
|
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
sns.set(style="whitegrid", font_scale=1.5, context="talk")
"""
For details on the params below, see the matplotlib docs:
https://matplotlib.org/users/customizing.html
"""
plt.rcParams["axes.edgecolor"] = "0.6"
plt.rcParams["figure.dpi"] = 200
plt.rcParams["font.family"] = "serif"
plt.rcParams["grid.color"] = "0.85"
plt.rcParams["savefig.dpi"] = 300
plt.rcParams["legend.columnspacing"] *= 0.8
plt.rcParams["legend.edgecolor"] = "0.6"
plt.rcParams["legend.markerscale"] = 1.0
plt.rcParams["legend.framealpha"] = "1"
plt.rcParams["legend.handlelength"] *= 1.5
plt.rcParams["legend.numpoints"] = 2
plt.rcParams["text.usetex"] = True
plt.rcParams["xtick.major.pad"] = -3
plt.rcParams["ytick.major.pad"] = -2
def plot_data(
data,
x=None,
plot_type="lineplot",
filepath=None,
save_fig=True,
figsize=[12.0, 6.0],
):
"""
Args:
data (2d array): 2d array with dimensions: num_topics x num_time_slices
Examples:
A simple example.
>>> import numpy as np
>>> data = np.arange(40).reshape([4,10])
>>> plot_data(data, save_fig=False)
>>> plot_data(data, plot_type='lineplot', save_fig=False)
>>> plot_data(data, plot_type='stackplot', save_fig=False)
An example using a Pipeline.
>>> import numpy as np
>>> from functools import partial
>>> from twitter_analysis_tools.utils import Pipeline
>>> data = [i*np.arange(10).T for i in range(1, 20)]
>>> data_pipeline = Pipeline(data)
>>> data_pipeline = data_pipeline.add_map(partial(np.expand_dims, axis=1))
>>> topic_distributions = np.concatenate(list(data_pipeline), axis=1)
>>> plot_data(topic_distributions, plot_type='stackplot', save_fig=False)
>>> plot_data(topic_distributions, plot_type='lineplot', save_fig=False)
"""
# Get dimensions.
num_topics, num_time_slices = data.shape
sns.set_palette(sns.husl_palette(num_topics))
# Create labels.
# TODO: pass labels in as argument.
labels = ["Topic {}".format(i) for i in range(1, num_topics + 1)]
if x is None:
x = np.arange(num_time_slices)
# Plot
fig = plt.figure(figsize=figsize)
# Plot data.
if plot_type == "lineplot":
for topic in range(num_topics):
plt.plot(x, data[topic, :], label=labels[topic])
if plot_type == "stackplot":
plt.stackplot(x, data, labels=labels)
# Put the legend out of the figure
plt.legend(
bbox_to_anchor=(1.05, 0.5),
loc="center left",
borderaxespad=0.0,
prop={"size": 10},
)
plt.xticks(rotation=45)
if save_fig:
if filepath is None:
raise Exception("Filepath must be specified if save_fig=True.")
fig.savefig(filepath + ".svg", bbox_inches="tight", transparent=True)
fig.savefig(filepath + ".png", bbox_inches="tight", transparent=True)
plt.close()
def sliding_average(data, window=10):
"""Average data over sliding window.
Args:
data (ndarray): data to average with dimensions: msrmts x num_samples.
window (int): size of the sliding window to average over.
Example:
>>> import numpy as np
>>> data = np.arange(24).reshape((4,6))
>>> sliding_average(data, window=5)
array([[ 2., 3.],
[ 8., 9.],
[14., 15.],
[20., 21.]])
An exception is raised if there is insufficient data to average over.
>>> import numpy as np
>>> data = np.arange(24).reshape((4,6))
>>> sliding_average(data, window=10)
Traceback (most recent call last):
...
Exception: Not enough data to average over with window of size 10.
"""
if data.shape[1] < window:
raise Exception(
"Not enough data to average over with window of size {}.".format(window)
)
# Make a copy to store averaged data (We could alternatively do this in place).
averaged = np.zeros((data.shape[0], data.shape[1] - window + 1))
# Average over sliding window.
for i in range(averaged.shape[1]):
# flake8: noqa: E203
averaged[:, i] = np.mean(data[:, i : i + window], axis=1)
return averaged
| 30.755396
| 84
| 0.625731
|
2244ed8b157c380beffe1ea6be468beaaa3957d1
| 1,268
|
py
|
Python
|
django/contrib/gis/tests/geo3d/models.py
|
fizista/django
|
16f3a6a4c7bab11644d11c2be029374e5095cb56
|
[
"BSD-3-Clause"
] | 2
|
2016-07-23T18:08:37.000Z
|
2016-07-24T09:54:34.000Z
|
env/lib/python2.7/site-packages/django/contrib/gis/tests/geo3d/models.py
|
luiscarlosgph/nas
|
e5acee61e8bbf12c34785fe971ce7df8dee775d4
|
[
"MIT"
] | 10
|
2019-12-26T17:31:31.000Z
|
2022-03-21T22:17:33.000Z
|
env/lib/python2.7/site-packages/django/contrib/gis/tests/geo3d/models.py
|
luiscarlosgph/nas
|
e5acee61e8bbf12c34785fe971ce7df8dee775d4
|
[
"MIT"
] | 1
|
2020-05-25T08:55:19.000Z
|
2020-05-25T08:55:19.000Z
|
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class NamedModel(models.Model):
name = models.CharField(max_length=30)
objects = models.GeoManager()
class Meta:
abstract = True
app_label = 'geo3d'
def __str__(self):
return self.name
class City3D(NamedModel):
point = models.PointField(dim=3)
class Interstate2D(NamedModel):
line = models.LineStringField(srid=4269)
class Interstate3D(NamedModel):
line = models.LineStringField(dim=3, srid=4269)
class InterstateProj2D(NamedModel):
line = models.LineStringField(srid=32140)
class InterstateProj3D(NamedModel):
line = models.LineStringField(dim=3, srid=32140)
class Polygon2D(NamedModel):
poly = models.PolygonField(srid=32140)
class Polygon3D(NamedModel):
poly = models.PolygonField(dim=3, srid=32140)
class SimpleModel(models.Model):
objects = models.GeoManager()
class Meta:
abstract = True
app_label = 'geo3d'
class Point2D(SimpleModel):
point = models.PointField()
class Point3D(SimpleModel):
point = models.PointField(dim=3)
class MultiPoint3D(SimpleModel):
mpoint = models.MultiPointField(dim=3)
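# Usage sketch (hypothetical, not part of the original test models): a 3D
# geometry can be assigned from WKT that carries a Z coordinate, e.g.
#   City3D.objects.create(name='Houston', point='POINT Z (-95.36 29.76 18)')
# while the 2D models above accept plain 'POINT (x y)' style values.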
| 19.212121
| 61
| 0.72082
|
6aa50d73cdb5503d3f00ea7e1cfb6b0835e5bf1f
| 7,449
|
py
|
Python
|
pywhistle/client.py
|
RobertD502/pywhistle
|
2c0718a6041bf4c747bd229fc628774ce247c613
|
[
"MIT"
] | 20
|
2019-02-18T02:53:21.000Z
|
2021-07-10T18:22:29.000Z
|
pywhistle/client.py
|
RobertD502/pywhistle
|
2c0718a6041bf4c747bd229fc628774ce247c613
|
[
"MIT"
] | 7
|
2019-11-17T14:25:58.000Z
|
2021-09-19T18:21:48.000Z
|
pywhistle/client.py
|
RobertD502/pywhistle
|
2c0718a6041bf4c747bd229fc628774ce247c613
|
[
"MIT"
] | 15
|
2019-11-17T04:16:46.000Z
|
2021-07-18T00:51:59.000Z
|
from aiohttp import ClientSession, client_exceptions
WHISTLE_CONST = {
"proto": "https",
"remote_host": "app.whistle.com",
"endpoint": "api"
}
class Client:
"""
Returns a string: URL(host, endpoint, resource)
"""
def url(self, config, resource) -> str:
return "%s://%s/%s/%s" % (config["proto"], config["remote_host"], config["endpoint"], resource)
"""
Returns default headers as understood by the Whistle API
Not invoked when retrieving a token.
"""
def headers(self, config, token):
return {
"Host": config['remote_host'],
"Content-Type": "application/json",
"Connection": "keep-alive",
"Accept": "application/vnd.whistle.com.v5+json",
"Accept-Language": "en",
"Accept-Encoding": "br, gzip, deflate",
"User-Agent": "Winston/3.9.0 (iPhone; iOS 13.5; Build:2399; Scale/3.0)",
"Authorization": "Bearer %s" % token
}
"""
Performs AIO request. Covers all verbs.
Returns json payload.
Raises exception if received http error code.
"""
async def request(
self,
config,
method: str,
resource: str,
headers: dict = None,
data: dict = None
) -> dict:
if not headers:
headers = {}
"""Need to specify encoding when getting Achievements or Places endpoint"""
if "achievements" in resource or "places" in resource:
async with self._websession.request(
method,
self.url(config, resource),
headers=headers,
data=data) as r:
r.raise_for_status()
return await r.json(encoding='UTF-8')
async with self._websession.request(
method,
self.url(config, resource),
headers=headers,
data=data) as r:
r.raise_for_status()
return await r.json()
"""
Helper to retrieve a single resource, such as '/pet'
"""
async def get_resource(self, config, token, resource):
return await self.request(
config,
method='get',
resource=resource,
headers=self.headers(config, token)
)
"""
Attempts login with credentials provided in init()
Returns authorization token for future requests.
"""
async def login(self, config) -> str:
return (await self.request(
config,
method='post',
resource='login',
data={
"email": self._username,
"password": self._password
}))['auth_token']
"""
Returns:
pets: array of
id, gender, name,
profile_photo_url_sizes: dict of size(wxh):url,
profile/breed, dob, address, etc.
"""
async def get_pets(self):
return await self.get_resource(self._config, self._token, 'pets')
"""
Returns:
pet: dictionary of
id, gender, name, etc for single pet
"""
async def get_pet(self, pet_id):
return await self.get_resource(self._config, self._token, 'pets/%s' % pet_id)
"""
Returns:
device: dictionary of
model_id, serial_number, battery_stats, etc
"""
async def get_device(self, serial_number):
return await self.get_resource(self._config, self._token, 'devices/%s' % serial_number)
"""
Returns:
owners: array of
id, first_name, last_name, current_user, searchable, email,
profile_photo_url_sizes': dict of size (wxh): url
"""
async def get_owners(self, pet_id):
return await self.get_resource(self._config, self._token, "pets/%s/owners" % pet_id)
"""
Returns:
array of
address, name,
id,
latitude, longitude, radius_meters,
shape,
outline: array of lat/long if shape == polygon,
per_ids: array of pet ids,
wifi network information
"""
async def get_places(self):
return await self.get_resource(self._config, self._token, "places")
"""
Returns:
stats: dict of
average_minutes_active, average_minutes_rest, average_calories, average_distance, current_streak, longest_streak, most_active_day
"""
async def get_stats(self, pet_id):
return await self.get_resource(self._config, self._token, "pets/%s/stats" % pet_id)
"""
Returns:
timeline_items: array of
type ('inside'),
data: dict of
place: array of
id, name
start_time, end_time
- or -
type('outside'),
data: dict of
static_map_url: a google map url,
origin, destination
"""
async def get_timeline(self, pet_id):
return await self.get_resource(self._config, self._token, "pets/%s/timelines/location" % pet_id)
"""
Returns:
dailies: array of
activity_goal, minutes_active, minutes_rest,
calories, distance,
day_number, excluded, timestamp, updated_at
"""
async def get_dailies(self, pet_id):
return await self.get_resource(self._config, self._token, "pets/%s/dailies" % pet_id)
"""
Returns:
daily: dict of
activities_goal, etc,
bar_chart_18min: array of values
"""
async def get_dailies_day(self, pet_id, day_id):
return await self.get_resource(self._config, self._token, "pets/%s/dailies/%s" % (pet_id, day_id))
"""
Returns:
daily_items: array of dictionaries
type: event type
title: event title
start_time: in UTC
end_time: string in UTC
"""
async def get_dailies_daily_items(self, pet_id, day_id):
return await self.get_resource(self._config, self._token, "pets/%s/dailies/%s/daily_items" % (pet_id, day_id))
"""
This one is lots of fun. Gamification for the win!
Returns:
achievements: array of
id, earned_achievement_id, actionable, type,
title, short_name,
background_color, strike_color,
badge_images: dict of size (wxh): url,
template_type, template_properties: dict of
header, footer, body, description (full text),
earned, earned_timestamp,
type_properties: dict of
progressive_type, unit, goal_value, current_value, decimal_places
"""
async def get_achievements(self, pet_id):
return await self.get_resource(self._config, self._token, "pets/%s/achievements" % pet_id)
async def async_init(self, whistle_const = WHISTLE_CONST) -> None:
self._config = whistle_const
if self._token is None:
self._token = await self.login(self._config)
def __init__(
self,
email: str,
password: str,
websession: ClientSession
) -> None:
self._config = None
self._token = None
self._username = email
self._password = password
self._websession = websession
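# A minimal usage sketch, based only on the constructor and coroutines defined
# above; the credentials are placeholders and error handling is omitted.
if __name__ == '__main__':
    import asyncio

    async def _demo():
        async with ClientSession() as session:
            client = Client('user@example.com', 'secret', session)
            await client.async_init()
            pets = await client.get_pets()
            print(pets)

    asyncio.run(_demo())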
| 30.528689
| 141
| 0.560478
|
2a5a1af65a40ae839eff82b001d865bf09f65fc2
| 45,431
|
py
|
Python
|
src/DataDriver/DataDriver.py
|
WillyWinkel/robotframework-datadriver
|
b2ca59672ffd9c44764c009198948f3b94a460ff
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/DataDriver/DataDriver.py
|
WillyWinkel/robotframework-datadriver
|
b2ca59672ffd9c44764c009198948f3b94a460ff
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/DataDriver/DataDriver.py
|
WillyWinkel/robotframework-datadriver
|
b2ca59672ffd9c44764c009198948f3b94a460ff
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright 2018- René Rohner
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
import os
import os.path
import re
import sys
from copy import deepcopy
from robot.libraries.BuiltIn import BuiltIn
from robot.api import logger
from robot.model.tags import Tags
from robot.run import USAGE
from robot.utils.argumentparser import ArgumentParser
from .ReaderConfig import ReaderConfig
from .ReaderConfig import TestCaseData
__version__ = '0.3.3'
class DataDriver:
"""|
|
===================================================
robotframework-datadriver
===================================================
DataDriver is a Data-Driven Testing library for Robot Framework.
This document explains how to use the DataDriver library listener. For
information about installation, support, and more, please visit the
`project page <https://github.com/Snooz82/robotframework-datadriver>`_
For more information about Robot Framework, see http://robotframework.org.
DataDriver is used/imported as Library but does not provide keywords
which can be used in a test. DataDriver uses the Listener Interface
Version 3 to manipulate the test cases and creates new test cases based
on a Data-File that contains the data for Data-Driven Testing. These
data files may be .csv, .xls or .xlsx files.
DataDriver is also able to cooperate with Microsoft PICT, an open-source
Windows tool for data combination testing. PICT is able to
generate data combinations based on textual model definitions.
https://github.com/Microsoft/pict
|
Installation
------------
If you already have Python >= 3.6 with pip installed, you can simply
run:
``pip install --upgrade robotframework-datadriver``
or if you have Python 2 and 3 installed in parallel you may use
``pip3 install --upgrade robotframework-datadriver``
DataDriver is compatible with Python 2.7 only in version 0.2.7.
``pip install --upgrade robotframework-datadriver==0.2.7``
Because Python 2.7 is deprecated, no new features are added to the Python 2.7 compatible version.
|
Table of contents
-----------------
- `What DataDriver does`_
- `How DataDriver works`_
- `Usage`_
- `Structure of test suite`_
- `Structure of data file`_
- `Data Sources`_
- `File Encoding and CSV Dialect`_
- `Custom DataReader Classes`_
- `Selection of Test Cases to execute`_
|
What DataDriver does
--------------------
DataDriver is an alternative approach to create Data-Driven Tests with
Robot Framework. DataDriver creates multiple test cases based on a test
template and data content of a csv or Excel file. All created tests
share the same test sequence (keywords) and differ in the test data.
Because these tests are created at runtime, only the template has to be
specified within the robot test specification and the used data are
specified in an external data file.
DataDriver gives an alternative to the build in data driven approach
like:
.. code :: robotframework
*** Settings ***
Resource login_resources.robot
Suite Setup Open my Browser
Suite Teardown Close Browsers
Test Setup Open Login Page
Test Template Invalid login
*** Test Cases *** User Passwort
Right user empty pass demo ${EMPTY}
Right user wrong pass demo FooBar
Empty user right pass ${EMPTY} mode
Empty user empty pass ${EMPTY} ${EMPTY}
Empty user wrong pass ${EMPTY} FooBar
Wrong user right pass FooBar mode
Wrong user empty pass FooBar ${EMPTY}
Wrong user wrong pass FooBar FooBar
*** Keywords ***
Invalid login
[Arguments] ${username} ${password}
Input username ${username}
Input pwd ${password}
click login button
Error page should be visible
This built-in approach is fine for a handful of data sets and a handful of
test cases. If you have generated or calculated data, and especially if
you have a variable number of test case combinations, these robot files
become quite a pain. With DataDriver you write the same test case
syntax only once and deliver the data from an external data file.
One of the rare reasons when Microsoft® Excel or LibreOffice Calc may be
used in testing… ;-)
`See example test suite <#example-suite>`__
`See example csv table <#example-csv>`__
|
How DataDriver works
--------------------
When the DataDriver is used in a test suite it will be activated before
the test suite starts. It uses the Listener Interface Version 3 of Robot
Framework to read and modify the test specification objects. After
activation it searches for the ``Test Template`` -Keyword to analyze the
``[Arguments]`` it has. As a second step, it loads the data from the
specified CSV file. Based on the ``Test Template`` -Keyword, DataDriver
creates as many test cases as there are lines in the CSV file. As values for
the arguments of the ``Test Template`` -Keyword it reads values from the
column of the CSV file with the matching name of the ``[Arguments]``.
For each line of the CSV data table, one test case will be created. It
is also possible to specify test case names, tags and documentation for
each test case in the specific test suite related CSV file.
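For example, if the ``Test Template`` keyword has the arguments ``${username}``
and ``${password}``, a matching csv file could start like the following
illustrative snippet (using the default semicolon separated Excel-EU dialect;
see `Example Data file`_ below for the full layout):
.. code ::
    *** Test Cases ***;${username};${password}
    Right user empty pass;demo;${EMPTY}
    Right user wrong pass;demo;FooBar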
|
Usage
-----
DataDriver is a "listener" but should not be set as a global listener
via the command line options of robot. Because DataDriver is a listener and a
library at the same time, it sets itself as a listener when the library
is imported into a test suite.
To use it, just use it as Library in your suite. You may use the first
argument (option) which may set the file name or path to the data file.
Without any options set, it loads a .csv file which has the same name
and path as the test suite .robot file.
**Example:**
.. code :: robotframework
*** Settings ***
Library DataDriver
Options
~~~~~~~
.. code :: robotframework
*** Settings ***
Library DataDriver
... file=${None}
... encoding=cp1252
... dialect=Excel-EU
... delimiter=;
... quotechar="
... escapechar=\\\\
... doublequote=True
... skipinitialspace=False
... lineterminator=\\r\\n
... sheet_name=0
... reader_class=${None}
... file_search_strategy=PATH
... file_regex=(?i)(.*?)(\\.csv)
... include=${None}
... exclude=${None}
|
Encoding
^^^^^^^^
``encoding`` must be set if the data file is not encoded in cp1252 (the default).
**cp1252** is the same like:
- Windows-1252
- Latin-1
- ANSI
- Windows Western European
See `Python Standard Encoding <https://docs.python.org/3/library/codecs.html#standard-encodings>`_ for more encodings
|
Example Excel (US / comma separated)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dialect Defaults:
.. code :: python
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\\r\\n'
quoting = QUOTE_MINIMAL
Usage in Robot Framework
.. code :: robotframework
*** Settings ***
Library DataDriver my_data_file.csv dialect=excel encoding=${None}
|
Example Excel Tab (\\\\t separated)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dialect Defaults:
.. code :: python
delimiter = '\\t'
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\\r\\n'
quoting = QUOTE_MINIMAL
Usage in Robot Framework
.. code :: robotframework
*** Settings ***
Library DataDriver my_data_file.csv dialect=excel_tab
|
Example Unix Dialect
^^^^^^^^^^^^^^^^^^^^
Dialect Defaults:
.. code :: python
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\\n'
quoting = QUOTE_ALL
Usage in Robot Framework
.. code :: robotframework
*** Settings ***
Library DataDriver my_data_file.csv dialect=unix_dialect
|
Example User Defined
^^^^^^^^^^^^^^^^^^^^
Users may define the format completely freely.
If an option is not set, the default values are used.
To register a user-defined format, the
option ``dialect`` has to be set to ``UserDefined``.
Usage in Robot Framework
.. code :: robotframework
*** Settings ***
Library DataDriver my_data_file.csv
... dialect=UserDefined
... delimiter=.
... lineterminator=\\n
|
Limitation
~~~~~~~~~~
|
Eclipse plug-in RED
^^^^^^^^^^^^^^^^^^^
There are known issues if the Eclipse plug-in RED is used, because the
debugging listener of this tool pre-calculates the number of test cases
before the test cases are created by DataDriver. This leads to the
situation that the RED listener throws exceptions: it is called
for each test step, but the RED GUI has already stopped debugging, so the
listener cannot send information to the GUI. This does not influence the
execution in any way but produces a lot of unwanted exceptions in the
log.
|
Variable types
^^^^^^^^^^^^^^
In this early Version of DataDriver, only scalar variables are
supported. Lists and dictionaries may be added in the next releases.
|
MS Excel and typed cells
^^^^^^^^^^^^^^^^^^^^^^^^
Microsoft Excel xls or xlsx files have the possibility to type their data
cells. Numbers are typically of the type float. If these data are not
explicitly defined as text in Excel, pandas will read them with the type
they have in Excel. Because we have to work with strings in Robot
Framework, these data are converted to strings. This leads to the
situation that a European date value like "04.02.2019" (4th February
2019) is handed over to Robot Framework as the ISO timestamp "2019-02-04
00:00:00". This may cause unwanted behavior. To mitigate this risk you
should define the cells of Excel based files explicitly as text within Excel.
|
How to activate the Data Driver
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To activate the DataDriver for a test suite (one specific \*.robot file)
just import it as a library. You may also specify some options if the
default parameters do not fit your needs.
**Example**:
.. code :: robotframework
*** Settings ***
Library DataDriver
Test Template Invalid Logins
|
Structure of test suite
-----------------------
|
Requirements
~~~~~~~~~~~~
At the moment there are some requirements for how a test
suite has to be structured so that DataDriver can get all the
information it needs.
- only the first test case will be used as a template. All other test
cases will be deleted.
- Test cases have to be defined with a
``Test Template``. The reason for this is that DataDriver needs to
know the names of the test case arguments. Test cases do not have
named arguments. Keywords do.
- The keyword which is used as
``Test Template`` must be defined within the test suite (in the same
\*.robot file). If the keyword which is used as ``Test Template`` is
defined in a ``Resource`` the DataDriver has no access to its
arguments names.
|
Example Test Suite
~~~~~~~~~~~~~~~~~~
.. code :: robotframework
*** Settings ***
Library DataDriver
Resource login_resources.robot
Suite Setup Open my Browser
Suite Teardown Close Browsers
Test Setup Open Login Page
Test Template Invalid Login
*** Test Case ***
Login with user ${username} and password ${password} Default UserData
*** Keywords ***
Invalid login
[Arguments] ${username} ${password}
Input username ${username}
Input pwd ${password}
click login button
Error page should be visible
In this example, the DataDriver is activated by using it as a Library.
It is used with default settings.
As ``Test Template`` the keyword ``Invalid Login`` is used. This
keyword has two arguments. Argument names are ``${username}`` and
``${password}``. These names have to be in the CSV file as column
header. The test case has two variable names included in its name,
which does not have any functionality in Robot Framework. However, the
Data Driver will use the test case name as a template name and
replaces the variables with the specific value of the single generated
test case.
This template test will only be used as a template. The specified data
``Default`` and ``UserData`` would only be used if no CSV file has
been found.
|
Structure of data file
----------------------
|
min. required columns
~~~~~~~~~~~~~~~~~~~~~
- ``*** Test Cases ***`` column has to be the first one.
- *Argument columns:* For each argument of the ``Test Template``
keyword one column must be existing in the data file as data source.
The name of this column must match the variable name and syntax.
|
optional columns
~~~~~~~~~~~~~~~~
- *[Tags]* column may be used to add specific tags to a test case. Tags
may be comma separated.
- *[Documentation]* column may be used to add specific test case
documentation.
|
Example Data file
~~~~~~~~~~~~~~~~~
+-------------+-------------+-------------+-------------+-------------+
| \**\* Test | ${username} | ${password} | [Tags] | [Documentat |
| Cases \**\* | | | | ion] |
| | | | | |
+=============+=============+=============+=============+=============+
| Right user | demo | ${EMPTY} | 1 | This is a |
| empty pass | | | | test case |
| | | | | documentati |
| | | | | on |
| | | | | of the |
| | | | | first one. |
+-------------+-------------+-------------+-------------+-------------+
| Right user | demo | FooBar | 2 | |
| wrong pass | | | | |
+-------------+-------------+-------------+-------------+-------------+
| empty user | ${EMPTY} | mode | 1,2,3,4 | This test |
| mode pass | | | | case has |
| | | | | the Tags |
| | | | | 1,2,3 and 4 |
| | | | | assigned. |
+-------------+-------------+-------------+-------------+-------------+
| | ${EMPTY} | ${EMPTY} | | This test |
| | | | | case has a |
| | | | | generated |
| | | | | name based |
| | | | | on template |
| | | | | name. |
+-------------+-------------+-------------+-------------+-------------+
| | ${EMPTY} | FooBar | | This test |
| | | | | case has a |
| | | | | generated |
| | | | | name based |
| | | | | on template |
| | | | | name. |
+-------------+-------------+-------------+-------------+-------------+
| | FooBar | mode | | This test |
| | | | | case has a |
| | | | | generated |
| | | | | name based |
| | | | | on template |
| | | | | name. |
+-------------+-------------+-------------+-------------+-------------+
| | FooBar | ${EMPTY} | | This test |
| | | | | case has a |
| | | | | generated |
| | | | | name based |
| | | | | on template |
| | | | | name. |
+-------------+-------------+-------------+-------------+-------------+
| | FooBar | FooBar | | This test |
| | | | | case has a |
| | | | | generated |
| | | | | name based |
| | | | | on template |
| | | | | name. |
+-------------+-------------+-------------+-------------+-------------+
In this data file, eight test cases are defined. Each line specifies one
test case. The first three test cases have specific names. The other five
test cases will get names generated from the template test case's name, with
the variables in that name replaced by the concrete values. The order of columns is
irrelevant except the first column, ``*** Test Cases ***``
|
Data Sources
------------
|
CSV / TSV (Character-separated values)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
By default DataDriver reads csv files. With the `Encoding and CSV
Dialect <#EncodingandCSVDialect>`__ settings you may configure which
structure your data source has.
|
XLS / XLSX Files
~~~~~~~~~~~~~~~~
If you want to use Excel based data sources, you may just set the file
option to the extension or you may point to the correct file. If the extension
is ".xls" or ".xlsx", DataDriver will interpret it as an Excel file.
You may select the sheet which will be read by the option ``sheet_name``.
By default it is set to 0, which selects the first table sheet.
You may use the sheet index (0 is the first sheet) or the sheet name (case sensitive).
The XLS interpreter will ignore all other options like encoding, delimiters etc.
.. code :: robotframework
*** Settings ***
Library DataDriver .xlsx
or:
.. code :: robotframework
*** Settings ***
Library DataDriver file=my_data_source.xlsx sheet_name=2nd Sheet
|
PICT (Pairwise Independent Combinatorial Testing)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Pict is able to generate data files based on a model file.
https://github.com/Microsoft/pict
Documentation: https://github.com/Microsoft/pict/blob/master/doc/pict.md
|
Requirements
^^^^^^^^^^^^
- Path to pict.exe must be set in the %PATH% environment variable.
- Data model file has the file extension ".pict"
- Pict model file must be encoded in UTF-8
|
How it works
^^^^^^^^^^^^
If the file option is set to a file with the extension .pict, DataDriver
will hand this file over to pict.exe and let it automatically generate
a file with the extension ".pictout". This file will then be used as the data
source for the test generation. (It is tab separated and UTF-8 encoded.)
Except for the file option, all other options of the library will be ignored.
.. code :: robotframework
*** Settings ***
Library DataDriver my_model_file.pict
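A model file could look like the following minimal sketch (the parameter
names and values are purely illustrative; check the data file rules above
for how the generated columns have to match your template arguments):
.. code ::
    ${username}: demo, FooBar, ${EMPTY}
    ${password}: mode, FooBar, ${EMPTY}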
|
File Encoding and CSV Dialect
-----------------------------
CSV is far away from well designed and has absolutely no "common"
format. Therefore it is possible to define your own dialect or use
predefined. The default is Excel-EU which is a semicolon separated
file.
These Settings are changeable as options of the Data Driver Library.
|
file=
~~~~~
.. code :: robotframework
*** Settings ***
Library DataDriver file=../data/my_data_source.csv
- None (default): DataDriver will search in the test suite's folder for a
\*.csv file with the same name as the test suite's \*.robot file
- only file extension: if you just set a file extension like ".xls" or
".xlsx", DataDriver will search the test suite's folder for a file with
the suite's name and that extension
- absolute path: if an absolute path to a file is set, DataDriver tries
to find and open the given data file.
- relative path: if the option does not point to a data file as an
absolute path, DataDriver tries to find a data file relative to the
folder where the test suite is located.
|
encoding=
~~~~~~~~~
May set the encoding of the CSV file, e.g.
``cp1252, ascii, iso-8859-1, latin-1, utf_8, utf_16, utf_16_be, utf_16_le``,
etc. See https://docs.python.org/3.7/library/codecs.html#standard-encodings
|
dialect=
~~~~~~~~
You may change the CSV dialect here. If the dialect is set to
‘UserDefined’, the following options are used. Otherwise, they are
ignored.
Supported dialects are:
.. code:: python
"excel"
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\\r\\n'
quoting = QUOTE_MINIMAL
"excel-tab"
delimiter = '\\t'
"unix"
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\\n'
quoting = QUOTE_ALL
|
Defaults:
~~~~~~~~~
.. code:: python
file=None,
encoding='cp1252',
dialect='Excel-EU',
delimiter=';',
quotechar='"',
escapechar='\\\\',
doublequote=True,
skipinitialspace=False,
lineterminator='\\r\\n',
sheet_name=0
|
Custom DataReader Classes
-------------------------
It is possible to write your own DataReader Class as a plugin for DataDriver.
DataReader Classes are called from DataDriver to return a list of TestCaseData.
|
Using Custom DataReader
~~~~~~~~~~~~~~~~~~~~~~~
DataReader classes are loaded into DataDriver dynamically at runtime.
DataDriver identifies the DataReader to load by the file extension of the data file or by the option ``reader_class``.
|
Select Reader by File Extension:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. code :: robotframework
*** Settings ***
Library DataDriver file=mydata.csv
This will load the class ``csv_Reader`` from ``csv_reader.py`` from the same folder.
|
Select Reader by Option:
^^^^^^^^^^^^^^^^^^^^^^^^
.. code :: robotframework
*** Settings ***
Library DataDriver file=mydata.csv reader_class=generic_csv_reader dialect=userdefined delimiter=\\t encoding=UTF-8
This will load the class ``generic_csv_reader`` from ``generic_csv_reader.py`` from the same folder.
|
Create Custom Reader
~~~~~~~~~~~~~~~~~~~~
Recommendation:
Have a look at the source code of an existing DataReader like ``csv_reader.py`` or ``generic_csv_reader.py``.
To write your own reader, create a class inherited from ``AbstractReaderClass``.
Your class will get all available configs from DataDriver as an object of ``ReaderConfig`` on ``__init__``.
DataDriver will call the method ``get_data_from_source``.
This method should load the data from your custom source and store it in a list of ``TestCaseData`` objects.
This list of ``TestCaseData`` will be returned to DataDriver.
``AbstractReaderClass`` also has some optional helper methods that may be useful.
You can either place the custom reader with the others or anywhere on the disk.
In the first case, just use it like the others:
.. code :: robotframework
*** Settings ***
Library DataDriver
... reader_class=my_reader.py
It is also possible to pass an absolute path to a custom reader:
.. code :: robotframework
*** Settings ***
Library DataDriver
... reader_class=C:/data/my_reader.py
This `my_reader.py` should implement a class inherited from AbstractReaderClass that is named `my_reader`.
.. code :: python
from DataDriver.AbstractReaderClass import AbstractReaderClass
class my_reader(AbstractReaderClass):
def get_data_from_source(self):
...
return self.data_table
See other readers as example.
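As a self-contained illustration, a minimal sketch of a reader that produces two hard-coded
test cases could look as follows. The import path of ``TestCaseData`` and the way the objects
are filled (attribute assignment on an empty instance) are assumptions for illustration only;
check the shipped readers for the exact interface.
.. code :: python
    from DataDriver.AbstractReaderClass import AbstractReaderClass
    from DataDriver.ReaderConfig import TestCaseData  # import path may differ, see shipped readers

    class list_reader(AbstractReaderClass):

        def get_data_from_source(self):
            data_table = []
            # one TestCaseData object per generated test case
            for user, password in [('alice', 'secret'), ('bob', 'hunter2')]:
                test_case = TestCaseData()
                test_case.test_case_name = f'Login {user}'
                test_case.arguments = {'${username}': user, '${password}': password}
                test_case.tags = ['smoke']
                test_case.documentation = f'Generated login test for {user}'
                data_table.append(test_case)
            return data_table  # DataDriver creates one test case per entry of this list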
|
Selection of Test Cases to execute
----------------------------------
Because the test cases are created by DataDriver at execution time, after parsing,
some of the usual Robot Framework mechanisms to select test cases cannot be used.
Examples of options that have to be used differently:
+-------------------+-----------------------------------------------------------------------+
| robot option | Description |
+===================+=======================================================================+
| ``--test`` | Selects the test cases by name. |
+-------------------+-----------------------------------------------------------------------+
| ``--task`` | Alias for --test that can be used when executing tasks. |
+-------------------+-----------------------------------------------------------------------+
| ``--rerunfailed`` | Selects failed tests from an earlier output file to be re-executed. |
+-------------------+-----------------------------------------------------------------------+
| ``--include`` | Selects the test cases by tag. |
+-------------------+-----------------------------------------------------------------------+
| ``--exclude`` | Selects the test cases by tag. |
+-------------------+-----------------------------------------------------------------------+
|
Selection of test cases by name
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
Select a single test case:
^^^^^^^^^^^^^^^^^^^^^^^^^^
To execute just a single test case by its exact name, it is possible to execute the test suite
and set the global variable ${DYNAMICTEST} to the name of the test case to execute.
The pattern must be ``suitename.testcasename``.
Example:
.. code ::
robot --variable "DYNAMICTEST:my suite name.test case to be executed" my_suite_name.robot
Pabot uses this feature to execute a single test case when using ``--testlevelsplit``
|
Select a list of test cases:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It is possible to set a list of test case names by using the variable ${DYNAMICTESTS} (plural).
This variable must be a string and the list of names must be pipe-separated (``|``).
Example:
.. code::
robot --variable DYNAMICTESTS:firstsuitename.testcase1|firstsuitename.testcase3|anothersuitename.othertestcase foldername
It is also possible to set the variable @{DYNAMICTESTS} as a list variable, e.g. from Python code, as sketched below.
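A minimal sketch, assuming a Python variable file that is handed to Robot Framework
with ``--variablefile`` (file name and test names are placeholders):
.. code :: python
    # dynamic_tests.py - a hypothetical Robot Framework variable file
    # A list assigned on module level becomes the list variable @{DYNAMICTESTS}.
    DYNAMICTESTS = [
        'firstsuitename.testcase1',
        'firstsuitename.testcase3',
        'anothersuitename.othertestcase',
    ]
Executed as ``robot --variablefile dynamic_tests.py foldername``, this has the same
effect as the pipe-separated string above.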
|
Re-run failed test cases:
~~~~~~~~~~~~~~~~~~~~~~~~~
Because it is not possible to use the command line argument ``--rerunfailed`` of robot directly,
DataDriver brings a Pre-Run-Modifier that handles this issue.
Normally, re-execution of failed test cases has three steps:
- original execution
- re-execution of the failed ones, based on the original execution output
- merging the original execution output with the re-execution output
The DataDriver.rerunfailed Pre-Run-Modifier removes all passed test cases based on a former output.xml.
Example:
.. code ::
robot --output original.xml tests # first execute all tests
robot --prerunmodifier DataDriver.rerunfailed:original.xml --output rerun.xml tests # then re-execute failing
rebot --merge original.xml rerun.xml # finally merge results
Be aware that in this case it is not allowed to use "``:``" as a character in the original output file path.
If you want to set a full path on Windows like ``e:\\myrobottest\\output.xml``, you have to use "``;``"
as the argument separator.
Example:
.. code ::
robot --prerunmodifier DataDriver.rerunfailed;e:\\myrobottest\\output.xml --output e:\\myrobottest\\rerun.xml tests
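If Robot Framework is started from Python instead of the command line, the same three
steps can be scripted; a minimal sketch using the public ``robot.run`` and ``robot.rebot``
APIs (paths and the ``tests`` folder are placeholders):
.. code :: python
    import robot

    # 1. original execution
    robot.run('tests', output='original.xml')
    # 2. re-execute only the failed test cases via the DataDriver Pre-Run-Modifier
    robot.run('tests', prerunmodifier='DataDriver.rerunfailed:original.xml', output='rerun.xml')
    # 3. merge both results
    robot.rebot('original.xml', 'rerun.xml', merge=True)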
|
Filtering with tags.
~~~~~~~~~~~~~~~~~~~~
New in ``0.3.1``
It is possible to use tags to filter the data source.
To use this, tags must be assigned to the test cases in the data source.
|
Robot Framework Command Line Arguments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To filter the source, the normal command line arguments of Robot Framework can be used.
See Robot Framework Userguide_ for more information
Be aware that the filtering of Robot Framework itself is done before DataDriver is called.
This means if the Template test is already filtered out by Robot Framework, DataDriver can never be called.
If you want to use ``--include`` the DataDriver TestSuite should have a ``DefaultTag`` or ``ForceTag`` that
fulfills these requirements.
.. _Userguide: http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#tag-patterns
Example: ``robot --include 1OR2 --exclude foo DataDriven.robot``
|
Filter based on Library Options
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It is also possible to filter the data source by an init option of DataDriver.
If these options are set, the Robot Framework filtering will be ignored.
Example:
.. code :: robotframework
*** Settings ***
Library DataDriver include=1OR2 exclude=foo
|
"""
ROBOT_LIBRARY_DOC_FORMAT = 'reST'
ROBOT_LISTENER_API_VERSION = 3
ROBOT_LIBRARY_SCOPE = 'TEST SUITE'
def __init__(self,
file=None,
encoding='cp1252',
dialect='Excel-EU',
delimiter=';',
quotechar='"',
escapechar='\\',
doublequote=True,
skipinitialspace=False,
lineterminator='\r\n',
sheet_name=0,
reader_class=None,
file_search_strategy='PATH',
file_regex=r'(?i)(.*?)(\.csv)',
include=None,
exclude=None
):
"""**Example:**
.. code :: robotframework
*** Settings ***
Library DataDriver
Options
~~~~~~~
.. code :: robotframework
*** Settings ***
Library DataDriver
... file=None
... encoding=cp1252
... dialect=Excel-EU
... delimiter=;
... quotechar="
... escapechar=\\\\
... doublequote=True
... skipinitialspace=False
... lineterminator=\\r\\n
... sheet_name=0
... reader_class=None
... file_search_strategy=PATH
... file_regex=(?i)(.*?)(\\.csv)
... include=None
... exclude=None
|
Encoding
^^^^^^^^
``encoding`` must be set if it shall not be cp1252.
**cp1252** is the same as:
- Windows-1252
- Latin-1
- ANSI
- Windows Western European
See `Python Standard Encoding <https://docs.python.org/3/library/codecs.html#standard-encodings>`_ for more encodings
|
Example Excel (US / comma separated)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dialect Defaults:
.. code :: python
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\\r\\n'
quoting = QUOTE_MINIMAL
Usage in Robot Framework
.. code :: robotframework
*** Settings ***
Library DataDriver my_data_file.csv dialect=excel encoding=${None}
|
Example Excel Tab (\\\\t separated)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dialect Defaults:
.. code :: python
delimiter = '\\t'
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\\r\\n'
quoting = QUOTE_MINIMAL
Usage in Robot Framework
.. code :: robotframework
*** Settings ***
Library DataDriver my_data_file.csv dialect=excel_tab
|
Example Unix Dialect
^^^^^^^^^^^^^^^^^^^^
Dialect Defaults:
.. code :: python
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\\n'
quoting = QUOTE_ALL
Usage in Robot Framework
.. code :: robotframework
*** Settings ***
Library DataDriver my_data_file.csv dialect=unix_dialect
|
Example User Defined
^^^^^^^^^^^^^^^^^^^^
The user may define the format completely freely.
If an option is not set, the default values are used.
To register a user defined format, the user has to set the
option ``dialect`` to ``UserDefined``.
Usage in Robot Framework
.. code :: robotframework
*** Settings ***
Library DataDriver my_data_file.csv
... dialect=UserDefined
... delimiter=.
... lineterminator=\\n
"""
self.ROBOT_LIBRARY_LISTENER = self
try:
re.compile(file_regex)
except re.error as e:
file_regex = r'(?i)(.*?)(\.csv)'
logger.console(f'[ DataDriver ] invalid Regex! used {file_regex} instead.')
logger.console(e)
options = self._robot_options()
self.include = options['include'] if not include else include
self.exclude = options['exclude'] if not exclude else exclude
self.reader_config = ReaderConfig(
file=file,
encoding=encoding,
dialect=dialect,
delimiter=delimiter,
quotechar=quotechar,
escapechar=escapechar,
doublequote=doublequote,
skipinitialspace=skipinitialspace,
lineterminator=lineterminator,
sheet_name=sheet_name,
reader_class=reader_class,
file_search_strategy=file_search_strategy.upper(),
file_regex=file_regex,
include=self.include,
exclude=self.exclude
)
self.suite_source = None
self.template_test = None
self.template_keyword = None
self.data_table = None
self.test_case_data = TestCaseData()
def _start_suite(self, suite, result):
"""Called when a test suite starts.
Data and result are model objects representing the executed test suite and its execution results, respectively.
:param suite: class robot.running.model.TestSuite(name='', doc='', metadata=None, source=None)
:param result: NOT USED
"""
log_level = BuiltIn().get_variable_value('${LOG LEVEL}')
self.DEBUG = log_level in ['DEBUG', 'TRACE']
self.suite_source = suite.source
self._create_data_table()
if self.DEBUG:
logger.console('[ DataDriver ] data Table created')
self.template_test = suite.tests[0]
self.template_keyword = self._get_template_keyword(suite)
suite.tests = self._get_filtered_test_list()
def _get_filtered_test_list(self):
temp_test_list = list()
dynamic_test_list = self._get_filter_dynamic_test_names()
for self.test_case_data in self.data_table:
if self._included_by_tags() and self._not_excluded_by_tags():
self._create_test_from_template()
if not dynamic_test_list or f'{self.test.parent.name}.{self.test.name}' in dynamic_test_list:
temp_test_list.append(self.test)
return temp_test_list
def _get_filter_dynamic_test_names(self):
dynamic_test_list = BuiltIn().get_variable_value('${DYNAMICTESTS}')
if isinstance(dynamic_test_list, str):
dynamic_test_list = dynamic_test_list.split('|')
elif isinstance(dynamic_test_list, list):
pass # list can just stay as list.
else:
dynamic_test_name = BuiltIn().get_variable_value('${DYNAMICTEST}')
if dynamic_test_name:
dynamic_test_list = [dynamic_test_name]
else:
dynamic_test_list = None
return dynamic_test_list
def _included_by_tags(self):
if self.include and isinstance(self.test_case_data.tags, list):
tags = Tags()
tags.add(self.test_case_data.tags)
return tags.match(self.include)
return True
def _not_excluded_by_tags(self):
if self.exclude and isinstance(self.test_case_data.tags, list):
tags = Tags()
tags.add(self.test_case_data.tags)
return not tags.match(self.exclude)
return True
def _create_data_table(self):
"""
Reads the data file via the configured DataReader and stores the result,
a list of TestCaseData objects, in self.data_table.
"""
self._resolve_file_attribute()
self.data_table = self._data_reader().get_data_from_source()
if self.DEBUG:
logger.console(f"[ DataDriver ] Opening file '{self.reader_config.file}'")
logger.console(f'[ DataDriver ] {len(self.data_table)} Test Cases loaded...')
def _data_reader(self):
if not self.reader_config.reader_class:
reader = self._get_data_reader_from_file_extension()
else:
reader = self._get_data_reader_from_reader_class()
return reader
def _get_data_reader_from_file_extension(self):
filename, file_extension = os.path.splitext(self.reader_config.file)
reader_type = file_extension.lower()[1:]
if self.DEBUG:
logger.console(f'[ DataDriver ] Initialized in {reader_type}-mode.')
reader_module = importlib.import_module(f'..{reader_type}_reader', 'DataDriver.DataDriver')
if self.DEBUG:
logger.console(f'[ DataDriver ] Reader Module: {reader_module}')
reader_class = getattr(reader_module, f'{reader_type}_Reader')
reader = reader_class(self.reader_config)
return reader
def _get_data_reader_from_reader_class(self):
reader_name = self.reader_config.reader_class
if self.DEBUG:
logger.console(f'[ DataDriver ] Initializes {reader_name}')
if os.path.isfile(reader_name):
if self.DEBUG:
logger.console(f'[ DataDriver ] Load from file {reader_name}')
dirname, basename = os.path.split(reader_name)
package = os.path.basename(dirname)
sys.path.insert(0, os.path.dirname(dirname))
module_name = os.path.splitext(basename)[0]
reader_module = importlib.import_module(package + '.' + module_name)
reader_name = module_name
else:
reader_module = importlib.import_module(f'..{reader_name}', 'DataDriver.DataDriver')
if self.DEBUG:
logger.console(f'[ DataDriver ] Reader Module: {reader_module}')
reader_class = getattr(reader_module, f'{reader_name}')
if self.DEBUG:
logger.console(f'[ DataDriver ] Reader Class: {reader_class}')
reader = reader_class(self.reader_config)
return reader
def _resolve_file_attribute(self):
if self.reader_config.file_search_strategy == 'PATH':
if (not self.reader_config.file) or ('' == self.reader_config.file[:self.reader_config.file.rfind('.')]):
self._set_data_file_to_suite_source()
else:
self._check_if_file_exists_as_path_or_in_suite()
elif self.reader_config.file_search_strategy == 'REGEX':
self._search_file_from_regex()
elif self.reader_config.file_search_strategy == 'NONE':
pass # If file_search_strategy is None, no validation of the input file is done. Use i.e. for SQL sources.
else:
raise ValueError(f'file_search_strategy={self.reader_config.file_search_strategy} is not a valid value!')
def _set_data_file_to_suite_source(self):
if not self.reader_config.file:
suite_path_as_data_file = f'{self.suite_source[:self.suite_source.rfind(".")]}.csv'
else:
suite_path = self.suite_source[:self.suite_source.rfind(".")]
file_extension = self.reader_config.file[self.reader_config.file.rfind("."):]
suite_path_as_data_file = f'{suite_path}{file_extension}'
if os.path.isfile(suite_path_as_data_file):
self.reader_config.file = suite_path_as_data_file
else:
raise FileNotFoundError(
f'File attribute was empty. Tried to find {suite_path_as_data_file} but file does not exist.')
def _check_if_file_exists_as_path_or_in_suite(self):
if not os.path.isfile(self.reader_config.file):
suite_dir = str(os.path.dirname(self.suite_source))
file_in_suite_dir = os.path.join(suite_dir, self.reader_config.file)
if os.path.isfile(file_in_suite_dir):
self.reader_config.file = file_in_suite_dir
else:
raise FileNotFoundError(
f'File attribute was not a full path. Tried to find {file_in_suite_dir} but file does not exist.')
def _search_file_from_regex(self):
if os.path.isdir(self.reader_config.file):
for filename in os.listdir(self.reader_config.file):
if re.match(self.reader_config.file_regex, filename):
self.reader_config.file = os.path.join(self.reader_config.file, filename)
break
def _get_template_keyword(self, suite):
template = self.template_test.template
if template:
for keyword in suite.resource.keywords:
if self._is_same_keyword(keyword.name, template):
return keyword
raise AttributeError('No "Test Template" keyword found for first test case.')
def _is_same_keyword(self, first, second):
return self._get_normalized_keyword(first) == self._get_normalized_keyword(second)
def _get_normalized_keyword(self, keyword):
return keyword.lower().replace(' ', '').replace('_', '')
def _create_test_from_template(self):
self.test = deepcopy(self.template_test)
self._replace_test_case_name()
self._replace_test_case_keywords()
self._add_test_case_tags()
self._replace_test_case_doc()
def _replace_test_case_name(self):
if self.test_case_data.test_case_name == '':
for variable_name in self.test_case_data.arguments:
self.test.name = self.test.name.replace(variable_name, self.test_case_data.arguments[variable_name])
else:
self.test.name = self.test_case_data.test_case_name
def _replace_test_case_keywords(self):
self.test.keywords.clear()
if self.template_test.keywords.setup is not None:
self.test.keywords.create(name=self.template_test.keywords.setup.name, type='setup',
args=self.template_test.keywords.setup.args)
self.test.keywords.create(name=self.template_keyword.name,
args=self._get_template_arguments())
if self.template_test.keywords.teardown is not None:
self.test.keywords.create(name=self.template_test.keywords.teardown.name, type='teardown',
args=self.template_test.keywords.teardown.args)
def _get_template_arguments(self):
return_arguments = []
for arg in self.template_keyword.args:
if arg in self.test_case_data.arguments:
return_arguments.append(self.test_case_data.arguments[arg])
# TODO: handle the dictionary arguments here
else:
return_arguments.append(arg)
return return_arguments
def _add_test_case_tags(self):
if isinstance(self.test_case_data.tags, list):
for tag in self.test_case_data.tags:
self.test.tags.add(tag.strip())
self._add_tag_if_pabot_dryrun()
def _add_tag_if_pabot_dryrun(self):
if BuiltIn().get_variable_value('${PABOTQUEUEINDEX}') == '-1':
self.test.tags.add('pabot:dynamictest')
def _replace_test_case_doc(self):
self.test.doc = self.test_case_data.documentation
def _robot_options(self):
arg_parser = ArgumentParser(USAGE, auto_pythonpath=False, auto_argumentfile=True, env_options='ROBOT_OPTIONS')
valid_args = self._filter_args(arg_parser)
options, data_sources = arg_parser.parse_args(valid_args)
return options
def _filter_args(self, arg_parser):
arg_state = 0
valid_robot_args = list()
for arg in sys.argv[1:]:
if arg_state == 0:
arg_state = self._get_argument_state(arg, arg_parser)
if arg_state > 0:
valid_robot_args.append(arg)
arg_state -= 1
return valid_robot_args
def _get_argument_state(self, arg, arg_parser):
short_opts = arg_parser._short_opts
long_opts = arg_parser._long_opts
param_opt = [l_opt[:-1] for l_opt in long_opts if l_opt[-1:] == '=']
arg_state = 0
if len(arg) == 2 and arg[0] == '-':
if arg[1] in '.?hTX':
arg_state = 1
elif arg[1] in short_opts:
arg_state = 2
elif len(arg) > 2 and arg[:2] == '--':
if arg[2:] in param_opt:
arg_state = 2
elif arg[2:] in long_opts:
arg_state = 1
return arg_state
| 32.778499
| 139
| 0.599657
|
ae8425d1ecd7277285f54d7661eb0b7626689220
| 12,407
|
py
|
Python
|
src/vm-builder-0.12.4+bzr489/build/lib.linux-x86_64-2.7/VMBuilder/plugins/__init__.py
|
cryptorinium/Num2
|
e83ea5b18b7822c73699de2a667d189791c48fbb
|
[
"MIT"
] | null | null | null |
src/vm-builder-0.12.4+bzr489/build/lib.linux-x86_64-2.7/VMBuilder/plugins/__init__.py
|
cryptorinium/Num2
|
e83ea5b18b7822c73699de2a667d189791c48fbb
|
[
"MIT"
] | null | null | null |
src/vm-builder-0.12.4+bzr489/build/lib.linux-x86_64-2.7/VMBuilder/plugins/__init__.py
|
cryptorinium/Num2
|
e83ea5b18b7822c73699de2a667d189791c48fbb
|
[
"MIT"
] | null | null | null |
#
# Uncomplicated VM Builder
# Copyright (C) 2007-2009 Canonical Ltd.
#
# See AUTHORS for list of contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import re
import shutil
import VMBuilder
import VMBuilder.util as util
from VMBuilder.exception import VMBuilderException
def load_plugins():
for plugin in find_plugins():
exec "import %s" % plugin
def find_plugins():
retval = []
for plugin_dir in __path__:
for p in os.listdir(plugin_dir):
path = '%s/%s' % (plugin_dir, p)
if os.path.isdir(path) and os.path.isfile('%s/__init__.py' % path):
retval.append("VMBuilder.plugins.%s" % p)
return retval
class Plugin(object):
priority = 10
def __init__(self, context):
self.context = context
self._setting_groups = []
self.register_options()
def register_options(self):
pass
def set_defaults(self):
pass
def preflight_check(self):
"""
Override this method with checks for anything that might cause the VM creation to fail
raise an exception if you can see already that this won't work
"""
pass
def post_install(self):
"""
This is called just after the distro is installed, before it gets copied to the fs images.
"""
pass
def install_file(self, path, contents=None, source=None, mode=None):
fullpath = '%s%s' % (self.context.chroot_dir, path)
if not os.path.isdir(os.path.dirname(fullpath)):
os.makedirs(os.path.dirname(fullpath))
if source and not contents:
shutil.copy(source, fullpath)
else:
fp = open(fullpath, 'w')
fp.write(contents)
fp.close()
if mode:
os.chmod(fullpath, mode)
return fullpath
def install_from_template(self, path, tmplname, context=None, mode=None):
return self.install_file(path, VMBuilder.util.render_template(self.__module__.split('.')[2], self.context, tmplname, context), mode=mode)
def run_in_target(self, *args, **kwargs):
return util.run_cmd('chroot', self.chroot_dir, *args, **kwargs)
def call_hooks(self, *args, **kwargs):
return util.call_hooks(self.context, *args, **kwargs)
# Settings
class SettingGroup(object):
def __init__(self, plugin, context, name):
# The plugin that owns this setting
self.plugin = plugin
# The VM object
self.context = context
# Name of the Setting Group
self.name = name
# A list of Setting objects
self._settings = []
def add_setting(self, *args, **kwargs):
# kwarg['type'] is used to determine which type of Setting object to create
# but we don't want to pass it on to its __init__.
if 'type' in kwargs:
type = kwargs['type']
del kwargs['type']
else:
type = 'str'
if type == 'str':
setting = self.plugin.StringSetting(self, *args, **kwargs)
elif type == 'bool':
setting = self.plugin.BooleanSetting(self, *args, **kwargs)
elif type == 'list':
setting = self.plugin.ListSetting(self, *args, **kwargs)
elif type == 'int':
setting = self.plugin.IntSetting(self, *args, **kwargs)
else:
raise VMBuilderException("Unknown setting type: '%s' (Plugin: '%s', Setting group: '%s', Setting: '%s')" %
(type,
self.plugin.__module__,
self.name,
args[0]))
self._settings.append(setting)
class Setting(object):
default = None
def __init__(self, setting_group, name, metavar=None, help=None, extra_args=None, valid_options=None, action=None, **kwargs):
# The Setting Group object that owns this Setting
self.setting_group = setting_group
# The name of the setting
name_regex = '[a-z0-9-]+$'
if not re.match(name_regex, name):
raise VMBuilderException('Invalid name for Setting: %s. Must match regex: %s' % (name, name_regex))
else:
self.name = name
self.default = kwargs.get('default', self.default)
self.help = help
# Alternate names (for the CLI)
self.extra_args = extra_args or []
self.metavar = metavar
self.value = None
self.value_set = False
self.valid_options = valid_options
if self.name in self.setting_group.context._config:
raise VMBuilderException("Setting named %s already exists. Previous definition in %s/%s/%s." %
(self.name,
self.setting_group.plugin.__name__,
self.setting_group.plugin._config[self.name].setting_group.name,
self.setting_group.plugin._config[self.name].name))
self.setting_group.context._config[self.name] = self
def get_value(self):
"""
If a value has previously been set, return it.
If not, return the default value.
"""
if self.value_set:
return self.value
else:
return self.default
def do_check_value(self, value):
"""
Checks the value's validity.
"""
if self.valid_options is not None:
if value not in self.valid_options:
raise VMBuilderException('%r is not a valid option for %s. Valid options are: %s' % (value, self.name, ' '.join(self.valid_options)))
else:
return self.check_value(value)
def get_valid_options(self):
return self.valid_options
def set_valid_options(self, valid_options):
"""
Set the list of valid options for this setting.
"""
if not type(valid_options) == list and valid_options is not None:
raise VMBuilderException('set_valid_options only accepts lists or None')
if valid_options:
for option in valid_options:
self.check_value(option)
self.valid_options = valid_options
def get_default(self):
"""
Return the default value.
"""
return self.default
def set_default(self, value):
"""
Set a new default value.
"""
value = self.do_check_value(value)
self.default = value
def set_value_fuzzy(self, value):
"""
Set new value.
Contrary to L{set_value}, L{set_value_fuzzy} will attempt
to turn L{value} into the target type. E.g. turning '10'
into 10, "main,universe,multiverse" into ['main',
'universe', 'multiverse']
"""
return self.set_value(value)
def set_value(self, value):
"""
Set a new value.
"""
value = self.do_check_value(value)
self.value = value
self.value_set = True
class ListSetting(Setting):
def __init__(self, *args, **kwargs):
self.default = []
super(Plugin.ListSetting, self).__init__(*args, **kwargs)
def set_value_fuzzy(self, value):
if len(value) == 1 and type(value[0]) == str:
value = value[0]
if type(value) == str:
if value == '':
return self.set_value([])
for sep in [':', ',']:
if sep in value:
split_regex = re.compile("\s*%s\s*" % sep)
return self.set_value(split_regex.split(value))
value = [value]
self.set_value(value)
return self.set_value(value)
def check_value(self, value):
if not type(value) == list:
raise VMBuilderException('%r is type %s, expected list.' % (value, type(value)))
return value
class IntSetting(Setting):
def set_value_fuzzy(self, value):
if type(value) != int:
try:
value = int(value)
except ValueError:
raise VMBuilderException('Could not interpret %r as an int.' % (value,))
return self.set_value(value)
def check_value(self, value):
if not type(value) == int:
raise VMBuilderException('%r is type %s, expected int.' % (value, type(value)))
return value
class BooleanSetting(Setting):
def set_value_fuzzy(self, value):
if type(value) == str:
if value.lower() in ['no', 'false', 'off', '0']:
value = False
elif value.lower() in ['yes', 'true', 'on', '1']:
value = True
else:
raise VMBuilderException('Could not interpret %r as a boolean value.' % (value,))
return self.set_value(value)
def check_value(self, value):
if not type(value) == bool:
raise VMBuilderException('%r is type %s, expected bool.' % (value, type(value)))
return value
class StringSetting(Setting):
def check_value(self, value):
if not type(value) == str:
raise VMBuilderException('%r is type %s, expected str.' % (value, type(value)))
return value
def setting_group(self, name):
setting_group = self.SettingGroup(self, self.context, name)
self._setting_groups.append(setting_group)
return setting_group
def has_setting(self, name):
return name in self.context._config
def get_setting(self, name):
if not name in self.context._config:
raise VMBuilderException('Unknown config key: %s' % name)
return self.context._config[name].get_value()
def set_setting_fuzzy(self, name, value):
if not name in self.context._config:
raise VMBuilderException('Unknown config key: %s' % name)
# print 'fuzzy setting of %s: %r' % (name, value)
self.context._config[name].set_value_fuzzy(value)
def set_setting(self, name, value):
if not name in self.context._config:
raise VMBuilderException('Unknown config key: %s' % name)
self.context._config[name].set_value(value)
def set_setting_default(self, name, value):
if not name in self.context._config:
raise VMBuilderException('Unknown config key: %s' % name)
self.context._config[name].set_default(value)
def get_setting_default(self, name):
if not name in self.context._config:
raise VMBuilderException('Unknown config key: %s' % name)
return self.context._config[name].get_default()
def get_setting_valid_options(self, name):
if not name in self.context._config:
raise VMBuilderException('Unknown config key: %s' % name)
return self.context._config[name].get_valid_options()
def set_setting_valid_options(self, name, valid_options):
if not name in self.context._config:
raise VMBuilderException('Unknown config key: %s' % name)
self.context._config[name].set_valid_options(valid_options)
| 37.59697
| 153
| 0.562021
|
15dfe899cbaa455fd0f81f045a547f7803f7a859
| 52,615
|
py
|
Python
|
userbot/google_imgs.py
|
karcexy/Marshmello
|
8035ddf830880fb860a12af3562a07f46f294641
|
[
"MIT"
] | 2
|
2020-12-06T03:46:08.000Z
|
2022-02-19T20:34:52.000Z
|
userbot/google_imgs.py
|
karcexy/Marshmello
|
8035ddf830880fb860a12af3562a07f46f294641
|
[
"MIT"
] | 4
|
2020-11-07T07:39:51.000Z
|
2020-11-10T03:46:41.000Z
|
userbot/google_imgs.py
|
karcexy/Marshmello
|
8035ddf830880fb860a12af3562a07f46f294641
|
[
"MIT"
] | 9
|
2020-11-28T11:30:44.000Z
|
2021-06-01T07:11:57.000Z
|
#Credit - Catuserbot @sandy1709
#!/usr/bin/env python
# In[ ]:
# coding: utf-8
###### Searching and Downloading Google Images to the local disk ######
# Import Libraries
# import encodes as request
import sys
import ast
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: # If the Current Version of Python is 3.0 or above
import urllib.request
from urllib.request import Request, urlopen
from urllib.request import URLError, HTTPError
from urllib.parse import quote
import http.client
from http.client import IncompleteRead, BadStatusLine
http.client._MAXHEADERS = 1000
else: # If the Current Version of Python is 2.x
import urllib2
from urllib2 import Request, urlopen
from urllib2 import URLError, HTTPError
from urllib import quote
import httplib
from httplib import IncompleteRead, BadStatusLine
httplib._MAXHEADERS = 1000
import time # Importing the time library to check the time of code execution
import os
import argparse
import ssl
import datetime
import json
import re
import codecs
import socket
args_list = ["keywords", "keywords_from_file", "prefix_keywords", "suffix_keywords",
"limit", "format", "color", "color_type", "usage_rights", "size",
"exact_size", "aspect_ratio", "type", "time", "time_range", "delay", "url", "single_image",
"output_directory", "image_directory", "no_directory", "proxy", "similar_images", "specific_site",
"print_urls", "print_size", "print_paths", "metadata", "extract_metadata", "socket_timeout",
"thumbnail", "thumbnail_only", "language", "prefix", "chromedriver", "related_images", "safe_search", "no_numbering",
"offset", "no_download","save_source","silent_mode","ignore_urls"]
def user_input():
config = argparse.ArgumentParser()
config.add_argument('-cf', '--config_file', help='config file name', default='', type=str, required=False)
config_file_check = config.parse_known_args()
object_check = vars(config_file_check[0])
if object_check['config_file'] != '':
records = []
json_file = json.load(open(config_file_check[0].config_file))
for record in range(0,len(json_file['Records'])):
arguments = {}
for i in args_list:
arguments[i] = None
for key, value in json_file['Records'][record].items():
arguments[key] = value
records.append(arguments)
records_count = len(records)
else:
# Taking command line arguments from users
parser = argparse.ArgumentParser()
parser.add_argument('-k', '--keywords', help='delimited list input', type=str, required=False)
parser.add_argument('-kf', '--keywords_from_file', help='extract list of keywords from a text file', type=str, required=False)
parser.add_argument('-sk', '--suffix_keywords', help='comma separated additional words added after to main keyword', type=str, required=False)
parser.add_argument('-pk', '--prefix_keywords', help='comma separated additional words added before main keyword', type=str, required=False)
parser.add_argument('-l', '--limit', help='delimited list input', type=str, required=False)
parser.add_argument('-f', '--format', help='download images with specific format', type=str, required=False,
choices=['jpg', 'gif', 'png', 'bmp', 'svg', 'webp', 'ico'])
parser.add_argument('-u', '--url', help='search with google image URL', type=str, required=False)
parser.add_argument('-x', '--single_image', help='downloading a single image from URL', type=str, required=False)
parser.add_argument('-o', '--output_directory', help='download images in a specific main directory', type=str, required=False)
parser.add_argument('-i', '--image_directory', help='download images in a specific sub-directory', type=str, required=False)
parser.add_argument('-n', '--no_directory', default=False, help='download images in the main directory but no sub-directory', action="store_true")
parser.add_argument('-d', '--delay', help='delay in seconds to wait between downloading two images', type=int, required=False)
parser.add_argument('-co', '--color', help='filter on color', type=str, required=False,
choices=['red', 'orange', 'yellow', 'green', 'teal', 'blue', 'purple', 'pink', 'white', 'gray', 'black', 'brown'])
parser.add_argument('-ct', '--color_type', help='filter on color', type=str, required=False,
choices=['full-color', 'black-and-white', 'transparent'])
parser.add_argument('-r', '--usage_rights', help='usage rights', type=str, required=False,
choices=['labeled-for-reuse-with-modifications','labeled-for-reuse','labeled-for-noncommercial-reuse-with-modification','labeled-for-nocommercial-reuse'])
parser.add_argument('-s', '--size', help='image size', type=str, required=False,
choices=['large','medium','icon','>400*300','>640*480','>800*600','>1024*768','>2MP','>4MP','>6MP','>8MP','>10MP','>12MP','>15MP','>20MP','>40MP','>70MP'])
parser.add_argument('-es', '--exact_size', help='exact image resolution "WIDTH,HEIGHT"', type=str, required=False)
parser.add_argument('-t', '--type', help='image type', type=str, required=False,
choices=['face','photo','clipart','line-drawing','animated'])
parser.add_argument('-w', '--time', help='image age', type=str, required=False,
choices=['past-24-hours','past-7-days','past-month','past-year'])
parser.add_argument('-wr', '--time_range', help='time range for the age of the image. should be in the format {"time_min":"MM/DD/YYYY","time_max":"MM/DD/YYYY"}', type=str, required=False)
parser.add_argument('-a', '--aspect_ratio', help='comma separated additional words added to keywords', type=str, required=False,
choices=['tall', 'square', 'wide', 'panoramic'])
parser.add_argument('-si', '--similar_images', help='downloads images very similar to the image URL you provide', type=str, required=False)
parser.add_argument('-ss', '--specific_site', help='downloads images that are indexed from a specific website', type=str, required=False)
parser.add_argument('-p', '--print_urls', default=False, help="Print the URLs of the images", action="store_true")
parser.add_argument('-ps', '--print_size', default=False, help="Print the size of the images on disk", action="store_true")
parser.add_argument('-pp', '--print_paths', default=False, help="Prints the list of absolute paths of the images",action="store_true")
parser.add_argument('-m', '--metadata', default=False, help="Print the metadata of the image", action="store_true")
parser.add_argument('-e', '--extract_metadata', default=False, help="Dumps all the logs into a text file", action="store_true")
parser.add_argument('-st', '--socket_timeout', default=False, help="Connection timeout waiting for the image to download", type=float)
parser.add_argument('-th', '--thumbnail', default=False, help="Downloads image thumbnail along with the actual image", action="store_true")
parser.add_argument('-tho', '--thumbnail_only', default=False, help="Downloads only thumbnail without downloading actual images", action="store_true")
parser.add_argument('-la', '--language', default=False, help="Defines the language filter. The search results are automatically returned in that language", type=str, required=False,
choices=['Arabic','Chinese (Simplified)','Chinese (Traditional)','Czech','Danish','Dutch','English','Estonian','Finnish','French','German','Greek','Hebrew','Hungarian','Icelandic','Italian','Japanese','Korean','Latvian','Lithuanian','Norwegian','Portuguese','Polish','Romanian','Russian','Spanish','Swedish','Turkish'])
parser.add_argument('-pr', '--prefix', default=False, help="A word that you would want to prefix in front of each image name", type=str, required=False)
parser.add_argument('-px', '--proxy', help='specify a proxy address and port', type=str, required=False)
parser.add_argument('-cd', '--chromedriver', help='specify the path to chromedriver executable in your local machine', type=str, required=False)
parser.add_argument('-ri', '--related_images', default=False, help="Downloads images that are similar to the keyword provided", action="store_true")
parser.add_argument('-sa', '--safe_search', default=False, help="Turns on the safe search filter while searching for images", action="store_true")
parser.add_argument('-nn', '--no_numbering', default=False, help="Allows you to exclude the default numbering of images", action="store_true")
parser.add_argument('-of', '--offset', help="Where to start in the fetched links", type=str, required=False)
parser.add_argument('-nd', '--no_download', default=False, help="Prints the URLs of the images and/or thumbnails without downloading them", action="store_true")
parser.add_argument('-iu', '--ignore_urls', default=False, help="delimited list input of image urls/keywords to ignore", type=str)
parser.add_argument('-sil', '--silent_mode', default=False, help="Remains silent. Does not print notification messages on the terminal", action="store_true")
parser.add_argument('-is', '--save_source', help="creates a text file containing a list of downloaded images along with source page url", type=str, required=False)
args = parser.parse_args()
arguments = vars(args)
records = []
records.append(arguments)
return records
class googleimagesdownload:
def __init__(self):
pass
# Downloading entire Web Document (Raw Page Content)
def download_page(self,url):
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: # If the Current Version of Python is 3.0 or above
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
req = urllib.request.Request(url, headers=headers)
resp = urllib.request.urlopen(req)
respData = str(resp.read())
return respData
except Exception as e:
print("Could not open URL. Please check your internet connection and/or ssl settings \n"
"If you are using proxy, make sure your proxy settings is configured correctly")
sys.exit()
else: # If the Current Version of Python is 2.x
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
req = urllib2.Request(url, headers=headers)
try:
response = urllib2.urlopen(req)
except URLError: # Handling SSL certificate failed
context = ssl._create_unverified_context()
response = urlopen(req, context=context)
page = response.read()
return page
except:
print("Could not open URL. Please check your internet connection and/or ssl settings \n"
"If you are using proxy, make sure your proxy settings is configured correctly")
sys.exit()
return "Page Not found"
# Download Page for more than 100 images
def download_extended_page(self,url,chromedriver):
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
if sys.version_info[0] < 3:
reload(sys)
sys.setdefaultencoding('utf8')
options = webdriver.ChromeOptions()
options.add_argument('--no-sandbox')
options.add_argument("--headless")
try:
browser = webdriver.Chrome(chromedriver, chrome_options=options)
except Exception as e:
print("Looks like we cannot locate the path the 'chromedriver' (use the '--chromedriver' "
"argument to specify the path to the executable.) or google chrome browser is not "
"installed on your machine (exception: %s)" % e)
sys.exit()
browser.set_window_size(1024, 768)
# Open the link
browser.get(url)
time.sleep(1)
print("Getting you a lot of images. This may take a few moments...")
element = browser.find_element_by_tag_name("body")
# Scroll down
for i in range(30):
element.send_keys(Keys.PAGE_DOWN)
time.sleep(0.3)
try:
browser.find_element_by_id("smb").click()
for i in range(50):
element.send_keys(Keys.PAGE_DOWN)
time.sleep(0.3) # bot id protection
except:
for i in range(10):
element.send_keys(Keys.PAGE_DOWN)
time.sleep(0.3) # bot id protection
print("Reached end of Page.")
time.sleep(0.5)
source = browser.page_source #page source
#close the browser
browser.close()
return source
#Correcting the escape characters for python2
def replace_with_byte(self,match):
return chr(int(match.group(0)[1:], 8))
def repair(self,brokenjson):
invalid_escape = re.compile(r'\\[0-7]{1,3}') # up to 3 digits for byte values up to FF
return invalid_escape.sub(self.replace_with_byte, brokenjson)
# Finding 'Next Image' from the given raw page
def get_next_tab(self,s):
start_line = s.find('class="dtviD"')
if start_line == -1: # If no links are found then give an error!
end_quote = 0
link = "no_tabs"
return link,'',end_quote
start_line = s.find('class="dtviD"')
start_content = s.find('href="', start_line + 1)
end_content = s.find('">', start_content + 1)
url_item = "https://www.google.com" + str(s[start_content + 6:end_content])
url_item = url_item.replace('&', '&')
start_line_2 = s.find('class="dtviD"')
s = s.replace('&', '&')
start_content_2 = s.find(':', start_line_2 + 1)
end_content_2 = s.find('&usg=', start_content_2 + 1)
url_item_name = str(s[start_content_2 + 1:end_content_2])
chars = url_item_name.find(',g_1:')
chars_end = url_item_name.find(":", chars + 6)
if chars_end == -1:
updated_item_name = (url_item_name[chars + 5:]).replace("+", " ")
else:
updated_item_name = (url_item_name[chars+5:chars_end]).replace("+", " ")
return url_item, updated_item_name, end_content
# Getting all links with the help of '_images_get_next_image'
def get_all_tabs(self,page):
tabs = {}
while True:
item,item_name,end_content = self.get_next_tab(page)
if item == "no_tabs":
break
else:
if len(item_name) > 100 or item_name == "background-color":
break
else:
tabs[item_name] = item # Append all the links in the list named 'Links'
time.sleep(0.1) # Timer could be used to slow down the request for image downloads
page = page[end_content:]
return tabs
#Format the object in readable format
def format_object(self,object):
data = object[1]
main = data[3]
info = data[9]
formatted_object = {}
formatted_object['image_height'] = main[2]
formatted_object['image_width'] = main[1]
formatted_object['image_link'] = main[0]
formatted_object['image_format']=main[0][-1*(len(main[0])-main[0].rfind(".")-1):]
formatted_object['image_description'] = info['2003'][3]
formatted_object['image_host'] = info['183836587'][0]
formatted_object['image_source'] = info['2003'][2]
formatted_object['image_thumbnail_url'] = data[2][0]
return formatted_object
#function to download single image
def single_image(self,image_url):
main_directory = "downloads"
extensions = (".jpg", ".gif", ".png", ".bmp", ".svg", ".webp", ".ico")
url = image_url
try:
os.makedirs(main_directory)
except OSError as e:
if e.errno != 17:
raise
pass
req = Request(url, headers={
"User-Agent": "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"})
response = urlopen(req, None, 10)
data = response.read()
response.close()
image_name = str(url[(url.rfind('/')) + 1:])
if '?' in image_name:
image_name = image_name[:image_name.find('?')]
# if ".jpg" in image_name or ".gif" in image_name or ".png" in image_name or ".bmp" in image_name or ".svg" in image_name or ".webp" in image_name or ".ico" in image_name:
if any(map(lambda extension: extension in image_name, extensions)):
file_name = main_directory + "/" + image_name
else:
file_name = main_directory + "/" + image_name + ".jpg"
image_name = image_name + ".jpg"
try:
output_file = open(file_name, 'wb')
output_file.write(data)
output_file.close()
except IOError as e:
raise e
except OSError as e:
raise e
print("completed ====> " + image_name.encode('raw_unicode_escape').decode('utf-8'))
def similar_images(self,similar_images):
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: # If the Current Version of Python is 3.0 or above
try:
searchUrl = 'https://www.google.com/searchbyimage?site=search&sa=X&image_url=' + similar_images
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
req1 = urllib.request.Request(searchUrl, headers=headers)
resp1 = urllib.request.urlopen(req1)
content = str(resp1.read())
l1 = content.find('AMhZZ')
l2 = content.find('&', l1)
urll = content[l1:l2]
newurl = "https://www.google.com/search?tbs=sbi:" + urll + "&site=search&sa=X"
req2 = urllib.request.Request(newurl, headers=headers)
resp2 = urllib.request.urlopen(req2)
l3 = content.find('/search?sa=X&q=')
l4 = content.find(';', l3 + 19)
urll2 = content[l3 + 19:l4]
return urll2
except:
return "Cloud not connect to Google Images endpoint"
else: # If the Current Version of Python is 2.x
try:
searchUrl = 'https://www.google.com/searchbyimage?site=search&sa=X&image_url=' + similar_images
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
req1 = urllib2.Request(searchUrl, headers=headers)
resp1 = urllib2.urlopen(req1)
content = str(resp1.read())
l1 = content.find('AMhZZ')
l2 = content.find('&', l1)
urll = content[l1:l2]
newurl = "https://www.google.com/search?tbs=sbi:" + urll + "&site=search&sa=X"
req2 = urllib2.Request(newurl, headers=headers)
resp2 = urllib2.urlopen(req2)
l3 = content.find('/search?sa=X&q=')
l4 = content.find(';', l3 + 19)
urll2 = content[l3 + 19:l4]
return(urll2)
except:
return "Cloud not connect to Google Images endpoint"
#Building URL parameters
def build_url_parameters(self,arguments):
if arguments['language']:
lang = "&lr="
lang_param = {"Arabic":"lang_ar","Chinese (Simplified)":"lang_zh-CN","Chinese (Traditional)":"lang_zh-TW","Czech":"lang_cs","Danish":"lang_da","Dutch":"lang_nl","English":"lang_en","Estonian":"lang_et","Finnish":"lang_fi","French":"lang_fr","German":"lang_de","Greek":"lang_el","Hebrew":"lang_iw ","Hungarian":"lang_hu","Icelandic":"lang_is","Italian":"lang_it","Japanese":"lang_ja","Korean":"lang_ko","Latvian":"lang_lv","Lithuanian":"lang_lt","Norwegian":"lang_no","Portuguese":"lang_pt","Polish":"lang_pl","Romanian":"lang_ro","Russian":"lang_ru","Spanish":"lang_es","Swedish":"lang_sv","Turkish":"lang_tr"}
lang_url = lang+lang_param[arguments['language']]
else:
lang_url = ''
if arguments['time_range']:
json_acceptable_string = arguments['time_range'].replace("'", "\"")
d = json.loads(json_acceptable_string)
time_range = ',cdr:1,cd_min:' + d['time_min'] + ',cd_max:' + d['time_max']
else:
time_range = ''
if arguments['exact_size']:
size_array = [x.strip() for x in arguments['exact_size'].split(',')]
exact_size = ",isz:ex,iszw:" + str(size_array[0]) + ",iszh:" + str(size_array[1])
else:
exact_size = ''
built_url = "&tbs="
counter = 0
params = {'color':[arguments['color'],{'red':'ic:specific,isc:red', 'orange':'ic:specific,isc:orange', 'yellow':'ic:specific,isc:yellow', 'green':'ic:specific,isc:green', 'teal':'ic:specific,isc:teel', 'blue':'ic:specific,isc:blue', 'purple':'ic:specific,isc:purple', 'pink':'ic:specific,isc:pink', 'white':'ic:specific,isc:white', 'gray':'ic:specific,isc:gray', 'black':'ic:specific,isc:black', 'brown':'ic:specific,isc:brown'}],
'color_type':[arguments['color_type'],{'full-color':'ic:color', 'black-and-white':'ic:gray','transparent':'ic:trans'}],
'usage_rights':[arguments['usage_rights'],{'labeled-for-reuse-with-modifications':'sur:fmc','labeled-for-reuse':'sur:fc','labeled-for-noncommercial-reuse-with-modification':'sur:fm','labeled-for-nocommercial-reuse':'sur:f'}],
'size':[arguments['size'],{'large':'isz:l','medium':'isz:m','icon':'isz:i','>400*300':'isz:lt,islt:qsvga','>640*480':'isz:lt,islt:vga','>800*600':'isz:lt,islt:svga','>1024*768':'visz:lt,islt:xga','>2MP':'isz:lt,islt:2mp','>4MP':'isz:lt,islt:4mp','>6MP':'isz:lt,islt:6mp','>8MP':'isz:lt,islt:8mp','>10MP':'isz:lt,islt:10mp','>12MP':'isz:lt,islt:12mp','>15MP':'isz:lt,islt:15mp','>20MP':'isz:lt,islt:20mp','>40MP':'isz:lt,islt:40mp','>70MP':'isz:lt,islt:70mp'}],
'type':[arguments['type'],{'face':'itp:face','photo':'itp:photo','clipart':'itp:clipart','line-drawing':'itp:lineart','animated':'itp:animated'}],
'time':[arguments['time'],{'past-24-hours':'qdr:d','past-7-days':'qdr:w','past-month':'qdr:m','past-year':'qdr:y'}],
'aspect_ratio':[arguments['aspect_ratio'],{'tall':'iar:t','square':'iar:s','wide':'iar:w','panoramic':'iar:xw'}],
'format':[arguments['format'],{'jpg':'ift:jpg','gif':'ift:gif','png':'ift:png','bmp':'ift:bmp','svg':'ift:svg','webp':'webp','ico':'ift:ico','raw':'ift:craw'}]}
for key, value in params.items():
if value[0] is not None:
ext_param = value[1][value[0]]
# counter will tell if it is first param added or not
if counter == 0:
# add it to the built url
built_url = built_url + ext_param
counter += 1
else:
built_url = built_url + ',' + ext_param
counter += 1
built_url = lang_url+built_url+exact_size+time_range
return built_url
#building main search URL
def build_search_url(self,search_term,params,url,similar_images,specific_site,safe_search):
#check safe_search
safe_search_string = "&safe=active"
# check the args and choose the URL
if url:
url = url
elif similar_images:
print(similar_images)
keywordem = self.similar_images(similar_images)
url = 'https://www.google.com/search?q=' + keywordem + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'
elif specific_site:
url = 'https://www.google.com/search?q=' + quote(
search_term.encode('utf-8')) + '&as_sitesearch=' + specific_site + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch' + params + '&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'
else:
url = 'https://www.google.com/search?q=' + quote(
search_term.encode('utf-8')) + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch' + params + '&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'
#safe search check
if safe_search:
url = url + safe_search_string
return url
#measures the file size
def file_size(self,file_path):
if os.path.isfile(file_path):
file_info = os.stat(file_path)
size = file_info.st_size
for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
if size < 1024.0:
return "%3.1f %s" % (size, x)
size /= 1024.0
return size
#keywords from file
def keywords_from_file(self,file_name):
search_keyword = []
with codecs.open(file_name, 'r', encoding='utf-8-sig') as f:
if '.csv' in file_name:
for line in f:
if line in ['\n', '\r\n']:
pass
else:
search_keyword.append(line.replace('\n', '').replace('\r', ''))
elif '.txt' in file_name:
for line in f:
if line in ['\n', '\r\n']:
pass
else:
search_keyword.append(line.replace('\n', '').replace('\r', ''))
else:
print("Invalid file type: Valid file types are either .txt or .csv \n"
"exiting...")
sys.exit()
return search_keyword
# make directories
def create_directories(self,main_directory, dir_name,thumbnail,thumbnail_only):
dir_name_thumbnail = dir_name + " - thumbnail"
# make a search keyword directory
try:
if not os.path.exists(main_directory):
os.makedirs(main_directory)
time.sleep(0.15)
path = (dir_name)
sub_directory = os.path.join(main_directory, path)
if not os.path.exists(sub_directory):
os.makedirs(sub_directory)
if thumbnail or thumbnail_only:
sub_directory_thumbnail = os.path.join(main_directory, dir_name_thumbnail)
if not os.path.exists(sub_directory_thumbnail):
os.makedirs(sub_directory_thumbnail)
else:
path = (dir_name)
sub_directory = os.path.join(main_directory, path)
if not os.path.exists(sub_directory):
os.makedirs(sub_directory)
if thumbnail or thumbnail_only:
sub_directory_thumbnail = os.path.join(main_directory, dir_name_thumbnail)
if not os.path.exists(sub_directory_thumbnail):
os.makedirs(sub_directory_thumbnail)
except OSError as e:
if e.errno != 17:
raise
pass
# Download Image thumbnails
def download_image_thumbnail(self,image_url,main_directory,dir_name,return_image_name,print_urls,socket_timeout,print_size,no_download,save_source,img_src,ignore_urls):
if print_urls or no_download:
print("Image URL: " + image_url)
if no_download:
return "success","Printed url without downloading"
try:
req = Request(image_url, headers={
"User-Agent": "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"})
try:
# timeout time to download an image
if socket_timeout:
timeout = float(socket_timeout)
else:
timeout = 10
response = urlopen(req, None, timeout)
data = response.read()
response.close()
path = main_directory + "/" + dir_name + " - thumbnail" + "/" + return_image_name
try:
output_file = open(path, 'wb')
output_file.write(data)
output_file.close()
if save_source:
list_path = main_directory + "/" + save_source + ".txt"
list_file = open(list_path,'a')
list_file.write(path + '\t' + img_src + '\n')
list_file.close()
except OSError as e:
download_status = 'fail'
download_message = "OSError on an image...trying next one..." + " Error: " + str(e)
except IOError as e:
download_status = 'fail'
download_message = "IOError on an image...trying next one..." + " Error: " + str(e)
download_status = 'success'
download_message = "Completed Image Thumbnail ====> " + return_image_name
# image size parameter
if print_size:
print("Image Size: " + str(self.file_size(path)))
except UnicodeEncodeError as e:
download_status = 'fail'
download_message = "UnicodeEncodeError on an image...trying next one..." + " Error: " + str(e)
except HTTPError as e: # If there is any HTTPError
download_status = 'fail'
download_message = "HTTPError on an image...trying next one..." + " Error: " + str(e)
except URLError as e:
download_status = 'fail'
download_message = "URLError on an image...trying next one..." + " Error: " + str(e)
except ssl.CertificateError as e:
download_status = 'fail'
download_message = "CertificateError on an image...trying next one..." + " Error: " + str(e)
except IOError as e: # If there is any IOError
download_status = 'fail'
download_message = "IOError on an image...trying next one..." + " Error: " + str(e)
return download_status, download_message
# Download Images
def download_image(self,image_url,image_format,main_directory,dir_name,count,print_urls,socket_timeout,prefix,print_size,no_numbering,no_download,save_source,img_src,silent_mode,thumbnail_only,format,ignore_urls):
if not silent_mode:
if print_urls or no_download:
print("Image URL: " + image_url)
if ignore_urls:
if any(url in image_url for url in ignore_urls.split(',')):
return "fail", "Image ignored due to 'ignore url' parameter", None, image_url
if thumbnail_only:
return "success", "Skipping image download...", str(image_url[(image_url.rfind('/')) + 1:]), image_url
if no_download:
return "success","Printed url without downloading",None,image_url
try:
req = Request(image_url, headers={
"User-Agent": "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"})
try:
# timeout time to download an image
if socket_timeout:
timeout = float(socket_timeout)
else:
timeout = 10
response = urlopen(req, None, timeout)
data = response.read()
response.close()
extensions = [".jpg", ".jpeg", ".gif", ".png", ".bmp", ".svg", ".webp", ".ico"]
# keep everything after the last '/'
image_name = str(image_url[(image_url.rfind('/')) + 1:])
if format:
if not image_format or image_format != format:
download_status = 'fail'
download_message = "Wrong image format returned. Skipping..."
return_image_name = ''
absolute_path = ''
return download_status, download_message, return_image_name, absolute_path
if image_format == "" or not image_format or "." + image_format not in extensions:
download_status = 'fail'
download_message = "Invalid or missing image format. Skipping..."
return_image_name = ''
absolute_path = ''
return download_status, download_message, return_image_name, absolute_path
if image_name.lower().find("." + image_format) < 0:
image_name = image_name + "." + image_format
else:
image_name = image_name[:image_name.lower().find("." + image_format) + (len(image_format) + 1)]
# prefix name in image
if prefix:
prefix = prefix + " "
else:
prefix = ''
if no_numbering:
path = main_directory + "/" + dir_name + "/" + prefix + image_name
else:
path = main_directory + "/" + dir_name + "/" + prefix + str(count) + "." + image_name
                try:
                    output_file = open(path, 'wb')
                    output_file.write(data)
                    output_file.close()
                    if save_source:
                        list_path = main_directory + "/" + save_source + ".txt"
                        list_file = open(list_path, 'a')
                        list_file.write(path + '\t' + img_src + '\n')
                        list_file.close()
                    absolute_path = os.path.abspath(path)
                    # return image name back to calling method to use it for thumbnail downloads
                    download_status = 'success'
                    download_message = "Completed Image ====> " + prefix + str(count) + "." + image_name
                    return_image_name = prefix + str(count) + "." + image_name
                except OSError as e:
                    download_status = 'fail'
                    download_message = "OSError on an image...trying next one..." + " Error: " + str(e)
                    return_image_name = ''
                    absolute_path = ''
# image size parameter
if not silent_mode:
if print_size:
print("Image Size: " + str(self.file_size(path)))
except UnicodeEncodeError as e:
download_status = 'fail'
download_message = "UnicodeEncodeError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
absolute_path = ''
except URLError as e:
download_status = 'fail'
download_message = "URLError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
absolute_path = ''
except BadStatusLine as e:
download_status = 'fail'
download_message = "BadStatusLine on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
absolute_path = ''
except HTTPError as e: # If there is any HTTPError
download_status = 'fail'
download_message = "HTTPError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
absolute_path = ''
except URLError as e:
download_status = 'fail'
download_message = "URLError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
absolute_path = ''
except ssl.CertificateError as e:
download_status = 'fail'
download_message = "CertificateError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
absolute_path = ''
except IOError as e: # If there is any IOError
download_status = 'fail'
download_message = "IOError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
absolute_path = ''
except IncompleteRead as e:
download_status = 'fail'
download_message = "IncompleteReadError on an image...trying next one..." + " Error: " + str(e)
return_image_name = ''
absolute_path = ''
return download_status,download_message,return_image_name,absolute_path
# Finding 'Next Image' from the given raw page
def _get_next_item(self,s):
start_line = s.find('rg_meta notranslate')
if start_line == -1: # If no links are found then give an error!
end_quote = 0
link = "no_links"
return link, end_quote
start_line = s.find('class="rg_meta notranslate">')
start_object = s.find('{', start_line + 1)
end_object = s.find('</div>', start_object + 1)
object_raw = str(s[start_object:end_object])
#remove escape characters based on python version
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: #python3
try:
object_decode = bytes(object_raw, "utf-8").decode("unicode_escape")
final_object = json.loads(object_decode)
            except Exception:
final_object = ""
else: #python2
try:
final_object = (json.loads(self.repair(object_raw)))
            except Exception:
final_object = ""
return final_object, end_object
# Getting all links with the help of '_images_get_next_image'
def _get_image_objects(self,s):
start_line = s.find("AF_initDataCallback({key: \\'ds:1\\'") - 10
start_object = s.find('[', start_line + 1)
end_object = s.find('</script>', start_object + 1) - 4
object_raw = str(s[start_object:end_object])
object_decode = bytes(object_raw[:-1], "utf-8").decode("unicode_escape")
image_objects = json.loads(object_decode[:-15])[31][0][12][2]
return image_objects
def _get_all_items(self,page,main_directory,dir_name,limit,arguments):
items = []
abs_path = []
errorCount = 0
i = 0
count = 1
image_objects = self._get_image_objects(page)
while count < limit+1:
            if i >= len(image_objects):
                print("no_links")
                break
else:
#format the item for readability
object = self.format_object(image_objects[i])
if arguments['metadata']:
if not arguments["silent_mode"]:
print("\nImage Metadata: " + str(object))
#download the images
download_status,download_message,return_image_name,absolute_path = self.download_image(object['image_link'],object['image_format'],main_directory,dir_name,count,arguments['print_urls'],arguments['socket_timeout'],arguments['prefix'],arguments['print_size'],arguments['no_numbering'],arguments['no_download'],arguments['save_source'],object['image_source'],arguments["silent_mode"],arguments["thumbnail_only"],arguments['format'],arguments['ignore_urls'])
if not arguments["silent_mode"]:
print(download_message)
if download_status == "success":
# download image_thumbnails
if arguments['thumbnail'] or arguments["thumbnail_only"]:
download_status, download_message_thumbnail = self.download_image_thumbnail(object['image_thumbnail_url'],main_directory,dir_name,return_image_name,arguments['print_urls'],arguments['socket_timeout'],arguments['print_size'],arguments['no_download'],arguments['save_source'],object['image_source'],arguments['ignore_urls'])
if not arguments["silent_mode"]:
print(download_message_thumbnail)
count += 1
object['image_filename'] = return_image_name
items.append(object) # Append all the links in the list named 'Links'
abs_path.append(absolute_path)
else:
errorCount += 1
#delay param
if arguments['delay']:
time.sleep(int(arguments['delay']))
i += 1
if count < limit:
print("\n\nUnfortunately all " + str(
limit) + " could not be downloaded because some images were not downloadable. " + str(
count-1) + " is all we got for this search filter!")
return items,errorCount,abs_path
# Bulk Download
def download(self,arguments):
paths_agg = {}
# for input coming from other python files
if __name__ != "__main__":
# if the calling file contains config_file param
if 'config_file' in arguments:
records = []
json_file = json.load(open(arguments['config_file']))
for record in range(0, len(json_file['Records'])):
arguments = {}
for i in args_list:
arguments[i] = None
for key, value in json_file['Records'][record].items():
arguments[key] = value
records.append(arguments)
total_errors = 0
for rec in records:
paths, errors = self.download_executor(rec)
for i in paths:
paths_agg[i] = paths[i]
if not arguments["silent_mode"]:
if arguments['print_paths']:
                    print(str(paths).encode('raw_unicode_escape').decode('utf-8'))
total_errors = total_errors + errors
return paths_agg,total_errors
# if the calling file contains params directly
paths, errors = self.download_executor(arguments)
for i in paths:
paths_agg[i] = paths[i]
if not arguments["silent_mode"]:
if arguments['print_paths']:
                    print(str(paths).encode('raw_unicode_escape').decode('utf-8'))
return paths_agg, errors
# for input coming from CLI
paths, errors = self.download_executor(arguments)
for i in paths:
paths_agg[i] = paths[i]
if not arguments["silent_mode"]:
if arguments['print_paths']:
                print(str(paths).encode('raw_unicode_escape').decode('utf-8'))
return paths_agg, errors
def download_executor(self,arguments):
paths = {}
errorCount = None
for arg in args_list:
if arg not in arguments:
arguments[arg] = None
######Initialization and Validation of user arguments
if arguments['keywords']:
search_keyword = [str(item) for item in arguments['keywords'].split(',')]
if arguments['keywords_from_file']:
search_keyword = self.keywords_from_file(arguments['keywords_from_file'])
# both time and time range should not be allowed in the same query
if arguments['time'] and arguments['time_range']:
raise ValueError('Either time or time range should be used in a query. Both cannot be used at the same time.')
        # both size and exact size should not be allowed in the same query
if arguments['size'] and arguments['exact_size']:
raise ValueError('Either "size" or "exact_size" should be used in a query. Both cannot be used at the same time.')
# both image directory and no image directory should not be allowed in the same query
if arguments['image_directory'] and arguments['no_directory']:
raise ValueError('You can either specify image directory or specify no image directory, not both!')
# Additional words added to keywords
if arguments['suffix_keywords']:
suffix_keywords = [" " + str(sk) for sk in arguments['suffix_keywords'].split(',')]
else:
suffix_keywords = ['']
# Additional words added to keywords
if arguments['prefix_keywords']:
prefix_keywords = [str(sk) + " " for sk in arguments['prefix_keywords'].split(',')]
else:
prefix_keywords = ['']
# Setting limit on number of images to be downloaded
if arguments['limit']:
limit = int(arguments['limit'])
else:
limit = 100
if arguments['url']:
current_time = str(datetime.datetime.now()).split('.')[0]
search_keyword = [current_time.replace(":", "_")]
if arguments['similar_images']:
current_time = str(datetime.datetime.now()).split('.')[0]
search_keyword = [current_time.replace(":", "_")]
# If single_image or url argument not present then keywords is mandatory argument
if arguments['single_image'] is None and arguments['url'] is None and arguments['similar_images'] is None and \
arguments['keywords'] is None and arguments['keywords_from_file'] is None:
print('-------------------------------\n'
'Uh oh! Keywords is a required argument \n\n'
'Please refer to the documentation on guide to writing queries \n'
'https://github.com/hardikvasa/google-images-download#examples'
'\n\nexiting!\n'
'-------------------------------')
sys.exit()
# If this argument is present, set the custom output directory
if arguments['output_directory']:
main_directory = arguments['output_directory']
else:
main_directory = "downloads"
# Proxy settings
if arguments['proxy']:
os.environ["http_proxy"] = arguments['proxy']
os.environ["https_proxy"] = arguments['proxy']
######Initialization Complete
total_errors = 0
for pky in prefix_keywords: # 1.for every prefix keywords
for sky in suffix_keywords: # 2.for every suffix keywords
i = 0
while i < len(search_keyword): # 3.for every main keyword
iteration = "\n" + "Item no.: " + str(i + 1) + " -->" + " Item name = " + (pky) + (search_keyword[i]) + (sky)
if not arguments["silent_mode"]:
print(iteration.encode('raw_unicode_escape').decode('utf-8'))
print("Evaluating...")
else:
print("Downloading images for: " + (pky) + (search_keyword[i]) + (sky) + " ...")
search_term = pky + search_keyword[i] + sky
if arguments['image_directory']:
dir_name = arguments['image_directory']
elif arguments['no_directory']:
dir_name = ''
else:
dir_name = search_term + ('-' + arguments['color'] if arguments['color'] else '') #sub-directory
if not arguments["no_download"]:
self.create_directories(main_directory,dir_name,arguments['thumbnail'],arguments['thumbnail_only']) #create directories in OS
params = self.build_url_parameters(arguments) #building URL with params
url = self.build_search_url(search_term,params,arguments['url'],arguments['similar_images'],arguments['specific_site'],arguments['safe_search']) #building main search url
if limit < 101:
raw_html = self.download_page(url) # download page
else:
raw_html = self.download_extended_page(url,arguments['chromedriver'])
if not arguments["silent_mode"]:
if arguments['no_download']:
print("Getting URLs without downloading images...")
else:
print("Starting Download...")
items,errorCount,abs_path = self._get_all_items(raw_html,main_directory,dir_name,limit,arguments) #get all image items and download images
paths[pky + search_keyword[i] + sky] = abs_path
#dumps into a json file
if arguments['extract_metadata']:
try:
if not os.path.exists("logs"):
os.makedirs("logs")
except OSError as e:
print(e)
json_file = open("logs/"+search_keyword[i]+".json", "w")
json.dump(items, json_file, indent=4, sort_keys=True)
json_file.close()
#Related images
if arguments['related_images']:
print("\nGetting list of related keywords...this may take a few moments")
tabs = self.get_all_tabs(raw_html)
for key, value in tabs.items():
final_search_term = (search_term + " - " + key)
print("\nNow Downloading - " + final_search_term)
if limit < 101:
new_raw_html = self.download_page(value) # download page
else:
new_raw_html = self.download_extended_page(value,arguments['chromedriver'])
self.create_directories(main_directory, final_search_term,arguments['thumbnail'],arguments['thumbnail_only'])
self._get_all_items(new_raw_html, main_directory, search_term + " - " + key, limit,arguments)
i += 1
total_errors = total_errors + errorCount
if not arguments["silent_mode"]:
print("\nErrors: " + str(errorCount) + "\n")
return paths, total_errors
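# Illustrative sketch (not part of the original script): a minimal programmatic call.
# The argument keys used here ("keywords", "limit", "print_urls") are the same ones
# download_executor expects; any keys left out are filled in with None by that method.
# Exact results depend on the pages Google returns, so treat this as a usage hint only.
def _example_programmatic_download():
    response = googleimagesdownload()
    paths, errors = response.download(
        {"keywords": "polar bears", "limit": 5, "print_urls": True})
    return paths, errors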
#------------- Main Program -------------#
def main():
records = user_input()
total_errors = 0
t0 = time.time() # start the timer
for arguments in records:
if arguments['single_image']: # Download Single Image using a URL
response = googleimagesdownload()
response.single_image(arguments['single_image'])
else: # or download multiple images based on keywords/keyphrase search
response = googleimagesdownload()
paths,errors = response.download(arguments) #wrapping response in a variable just for consistency
total_errors = total_errors + errors
t1 = time.time() # stop the timer
        total_time = t1 - t0  # Calculating the total time required to crawl, find and download all the images
if not arguments["silent_mode"]:
print("\nEverything downloaded!")
print("Total errors: " + str(total_errors))
print("Total time taken: " + str(total_time) + " Seconds")
if __name__ == "__main__":
main()
| 52.145689
| 622
| 0.574703
|
8766dffe73edf443043c12e35092d922cfc97e76
| 17,366
|
py
|
Python
|
operations.py
|
JStuckner/smart_tem
|
5381ba1fa5b74784e520bd0f07e420347959f72e
|
[
"MIT"
] | 1
|
2021-07-28T05:46:33.000Z
|
2021-07-28T05:46:33.000Z
|
operations.py
|
JStuckner/smart_tem
|
5381ba1fa5b74784e520bd0f07e420347959f72e
|
[
"MIT"
] | null | null | null |
operations.py
|
JStuckner/smart_tem
|
5381ba1fa5b74784e520bd0f07e420347959f72e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module applies global filters to the image sets
Author: Joshua Stuckner
Date: 2017/06/21
"""
import warnings
import sys
import time
import math
from scipy.ndimage import filters
import scipy.ndimage as ndim
from skimage import restoration, morphology
from skimage.filters import threshold_otsu
import numpy as np
import matplotlib.pyplot as plt
import inout
import visualize
# update_progress() : Displays or updates a console progress bar
# Accepts a float between 0 and 1. Any int will be converted to a float.
# A value under 0 represents a 'halt'.
# A value at 1 or bigger represents 100%
def update_progress(progress):
barlength = 20 # Modify this to change the length of the progress bar
status = ""
if isinstance(progress, int):
progress = float(progress)
if not isinstance(progress, float):
progress = 0
status = "error: progress var must be float\r\n"
if progress < 0:
progress = 0
status = "Halt...\r\n"
if progress >= 1:
progress = 1
status = "Done...\r\n"
block = int(round(barlength*progress))
text = "\rPercent: [{0}] {1}% {2}".format(
"#"*block + "-"*(barlength-block), round(progress*100, 1), status)
sys.stdout.write(text)
sys.stdout.flush()
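# Illustrative sketch (not in the original module): driving the progress bar above from
# a processing loop. The step count and sleep are arbitrary.
def _example_update_progress(n_steps=50):
    for step in range(n_steps + 1):
        update_progress(step / n_steps)
        time.sleep(0.01)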
def gaussian_stacking(image_set, sigma=1):
"""
Stacks the images using a Gaussian filter
This function is a wrapper around scipy.ndimage.filters.gaussian_filter
"""
    print('Applying Gaussian stacking...', end=' ')
start = time.time()
out = filters.gaussian_filter(image_set, (0, 0, sigma))
print('Done, took', round(time.time()-start, 2), 'seconds.')
return out
def gaussian(image_set, sigma=None):
"""
Applies a gaussian filter to the image set.
This function is a wrapper around scipy.ndimage.filters.gaussian_filter
"""
if sigma is None:
print('Please set a filter radius (sigma). Defaulting to sigma = 1.')
sigma = 1
    print('Applying Gaussian filter...', end=' ')
start = time.time()
try:
out = filters.gaussian_filter(image_set, (sigma, sigma, 0))
except RuntimeError: # When there is only one frame
out = filters.gaussian_filter(image_set, (sigma, sigma))
print('Done, took', round(time.time()-start, 2), 'seconds.')
return out
def median_stacking(image_set, size=3):
print('Applying median stacking...', end=' ')
start = time.time()
out = filters.median_filter(image_set, size=(1,1,size))
print('Done, took', round(time.time() - start, 2), 'seconds.')
return out
def median(image_set, size=None):
"""
Applies a median filter to the image set.
This function is a wrapper around scipy.ndimage.filters.median_filter
"""
if size is None:
print('Please set a filter size. Defaulting to size = 3.')
size = 3
print('Applying median filter...', end=' ')
start = time.time()
try:
out = filters.median_filter(image_set, size=(size, size, 1))
except RuntimeError: # When there is only one frame
out = filters.median_filter(image_set, size=(size, size))
print('Done, took', round(time.time() - start, 2), 'seconds.')
return out
def down_sample(image_set, pixel_size, nyquist=3, d=0.1, source_sigma=0.5,
dtype='uint8'):
"""
This function performs Gaussian downsampling on the frames. The goal is
to resample the image to a pixel size that is between 2.3 and 3 times
smaller than the point to point resolution of the microscope. This is
in accordance with the Nyquist-Shannon sampling theorem. The optimum
value of the Gaussian filter sigma is calculated.
:param image_set:
3D ndarray. Contains the image frames.
:param pixel_size:
The size of the pixels in nanometers.
:param d: optional
The point to point resolution of the microscope.
:param nyquist: optional
Should be between 2.3 - 3.
Sets the number of pixels per d
:param source_sigma: optional
Set higher than 0.5 if the image was already blurred and a sharper
downsampled result is desired.
:return:
The downsampled dataset
"""
try:
rows, cols, num_frames = image_set.shape
except ValueError:
rows, cols = image_set.shape
num_frames = 1
target_pixel_size = d / nyquist
target_sigma = 0.5
scale = pixel_size / target_pixel_size
if pixel_size >= target_pixel_size:
print('Down sampling will not reach the target (need interpolation).')
return image_set
else:
# Calculate the optimum Gaussian filter sigma.
s = target_sigma * target_pixel_size / pixel_size
gsigma = math.sqrt(s**2 - source_sigma**2)
# Apply the gaussian filter.
out = gaussian(image_set, gsigma)
print('Downsampling...', end=' ')
start = time.time()
if num_frames == 1:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out = ndim.interpolation.zoom(out, (scale,scale),
order = 3, prefilter=True,
output=dtype)
else:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out = ndim.interpolation.zoom(out, (scale, scale, 1),
order= 3, prefilter=True,
output=dtype)
print('Done, took', round(time.time() - start, 2), 'seconds.')
return out
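# Illustrative sketch (assumed numbers): frames recorded at 0.02 nm/pixel are resampled
# toward d/nyquist = 0.1/3 nm/pixel as described in the docstring above. The random
# stack stands in for real TEM frames.
def _example_down_sample():
    frames = np.random.randint(0, 255, size=(512, 512, 10)).astype('uint8')
    return down_sample(frames, pixel_size=0.02, nyquist=3, d=0.1)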
def tv_chambolle(image_set, weight, max_stacking=None, eps=0.0002, verbose=True):
"""Performs chambolle filtering using the skimage library.
:param image_set:
:param weight:
:param eps:
:param max_stacking:
:return:
"""
if verbose:
print('Applying Total Variation Chambolle filter...', end=' ', flush=True)
start = time.time()
try:
rows, cols, num_frames = image_set.shape
except ValueError:
rows, cols = image_set.shape
num_frames = 1
if num_frames == 1:
if verbose:
print("Performing filter on the single frame passed...", end=" ")
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out = inout.uint8(restoration.denoise_tv_chambolle(
image_set, weight=weight, eps=eps))
elif max_stacking is None or max_stacking < 0:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out = inout.uint8(restoration.denoise_tv_chambolle(
image_set, weight=weight, eps=eps))
elif not isinstance(max_stacking, int):
print("Max stacking must be an odd integer")
return image_set
elif num_frames <= max_stacking:
print(("Total number of frames is <= max_stacking. "
"Stacking not limited."))
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out = inout.uint8(restoration.denoise_tv_chambolle(
image_set, weight=weight, eps=eps))
elif max_stacking < 1:
print("max_stacking must be greater than 0.")
return image_set
else:
out = np.zeros((rows, cols, num_frames))
if max_stacking == 1:
for i in range(num_frames):
if verbose:
if i % 5 == 0:
visualize.update_progress(i / num_frames)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out[:, :, i] = inout.uint8(restoration.denoise_tv_chambolle(
image_set[:, :, i],
weight=weight, eps=eps))
else:
half_max = int(max_stacking / 2)
adjust = 1 if max_stacking % 2 == 0 else 0 # subtract 1 from bottom half for even stacking
for i in range(num_frames):
if verbose:
if i % 5 == 0:
visualize.update_progress(i / num_frames)
if i <= half_max - adjust:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out[:,:,i] = inout.uint8(
restoration.denoise_tv_chambolle(
image_set[:,:,:max_stacking-1],
weight=weight, eps=eps))[:,:,i]
elif num_frames - i <= half_max + 1:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out[:, :, i] = inout.uint8(
restoration.denoise_tv_chambolle(
image_set[:,:,-max_stacking:],
weight=weight, eps=eps))[:,:,-1*(num_frames-i)]
else:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
out[:, :, i] = inout.uint8(
restoration.denoise_tv_chambolle(
image_set[:,:,i-half_max+adjust:i+half_max],
weight=weight, eps=eps))[:,:,half_max-adjust]
if verbose:
print('Done, took', round(time.time() - start, 2), 'seconds.')
return out.astype('uint8')
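# Illustrative sketch (assumed weight): denoise a short stack while limiting the temporal
# window used by the Chambolle filter to 5 frames via max_stacking, as handled above.
def _example_tv_chambolle():
    frames = np.random.randint(0, 255, size=(128, 128, 20)).astype('uint8')
    return tv_chambolle(frames, weight=0.1, max_stacking=5)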
def tv_bregman(image_set, weight, eps, max_iter=100, isotropic=True):
"""Performs Bregman denoising using the skimage library.
:param image_set:
:param weight:
:param eps:
:param max_stacking:
:return:
"""
print('Applying Total Variation Bregman...', end=' ')
start = time.time()
out = inout.uint8(restoration.denoise_tv_bregman(
image_set, weight=weight, eps=eps, isotropic=isotropic,
max_iter=max_iter))
print('Done, took', round(time.time() - start, 2), 'seconds.')
return out.astype('uint8')
def normalize(im, std=-1, verbose=True):
im = im.astype('float')
if std < 0:
minval = im.min()
maxval = im.max()
else:
shift = std * np.std(im)
minval = int(round(np.average(im) - shift,0))
maxval = int(round(np.average(im) + shift,0))
if minval != maxval:
im -= minval
im *= (255.0/(maxval-minval))
im = np.clip(im, 0.0, 255.0)
im = im.astype('uint8')
if verbose:
print('Levels balanced.')
return im
def get_background_mask(image_set):
stacked = np.average(image_set, axis=2) # Stack all images
time_SD = np.std(image_set, axis=2) #variation of each pixel through time
std_mult = 1 #adjust threshold value
thresh = time_SD > threshold_otsu(time_SD) + std_mult * np.std(time_SD)
small = 10 #size of small objects to remove for first round
removed = morphology.remove_small_objects(thresh, small)
selem_radius = 3 #dialtion radius
dial = morphology.binary_dilation(
removed, selem=morphology.disk(selem_radius))
small2 = 300 #size of small objects to remove for second round
removed2 = morphology.remove_small_objects(dial, small2)
fr = 30 #final closing radius
pad = np.pad(removed2, fr, mode='constant') #prevents over erosion near edge
final_mask = morphology.binary_closing(
pad, selem=morphology.disk(fr))[fr:-fr, fr:-fr]
area = np.count_nonzero(final_mask)
rad = math.sqrt(area/3.14)
fdr = int(round(rad/5)) #final dilation radius
print(fdr)
final_mask = morphology.binary_dilation(
final_mask, selem=morphology.disk(fdr))
mask3D = np.zeros(image_set.shape, dtype=bool)
mask3D[:,:,:] = final_mask[:,:, np.newaxis]
#image_set[mask3D == False] = set_to
## ims = [stacked,
## time_SD,
## thresh,
## removed,
## dial,
## removed2,
## final_mask]
## titles = ["Stacked",
## "Time St. Dev.",
## "Thresholded",
## "Remove small",
## "Dialated",
## "Remove 2",
## "Final mask"]
#visualize.plot_grid(ims,titles, rows=2, cols=4)
#visualize.play_movie(image_set)
return mask3D
def unsharp_mask(im, sigma, threshold, amount):
print('Applying Unsharp Mask...', end=' ')
start = time.time()
try:
rows, cols, num_frames = im.shape
except ValueError:
rows, cols = im.shape
num_frames = 1
if amount > 0.9 or amount < 0.1:
print("'amount' should be between 0.1 and 0.9!")
if num_frames == 1:
blurred = filters.gaussian_filter(im, (sigma, sigma))
lowContrastMask = abs(im - blurred) < threshold
sharpened = im*(1+amount) + blurred*(-amount)
locs = np.where(lowContrastMask != 0)
out = im.copy()
out[locs[0], locs[1]] = np.clip(sharpened[locs[0], locs[1]], 0, 255)
print('Done, took', round(time.time() - start, 2), 'seconds.')
return out
else:
out = im.copy()
blurred = filters.gaussian_filter(im, (sigma, sigma, 0))
for i in range(num_frames):
lowContrastMask = abs(im[:,:,i] - blurred[:,:,i]) < threshold
sharpened = im[:,:,i]*(1+amount) + blurred[:,:,i]*(-amount)
locs = np.where(lowContrastMask != 0)
out[:,:,i][locs[0], locs[1]] = np.clip(sharpened[locs[0], locs[1]], 0, 255)
print('Done, took', round(time.time() - start, 2), 'seconds.')
return out
def blur_background(im, sigma=None, thresh=None,
small1=None, dil_rad=None,
small2=None, close_rad=None):
rows, cols, _ = im.shape
size = (rows + cols) / 2
# get standard deviation
std = np.std(im, axis=2)
# threshold
if thresh is None:
thresh = int(threshold_otsu(std) * 0.75)
binary = std > thresh
# set values if none
    sigma = size/100 if sigma is None else sigma
    small1 = int(round(size/10)) if small1 is None else small1
    dil_rad = int(round(size/100)) if dil_rad is None else dil_rad
    small2 = int(round(size)) if small2 is None else small2
    close_rad = int(round(size/20)) if close_rad is None else close_rad
#morphology operations
morph = binary.copy()
selem_radius = 1
morph = morphology.remove_small_objects(morph, small1)
morph = morphology.binary_dilation(
morph, selem=morphology.disk(dil_rad))
morph = morphology.remove_small_objects(morph, small2)
fr = close_rad #final closing radius
pad = np.pad(morph, fr, mode='constant') #prevents over erosion near edge
morph = morphology.binary_closing(
pad, selem=morphology.disk(fr))[fr:-fr, fr:-fr]
# make location masks
mask = np.where(morph != 0)
notmask = np.where(morph == 0)
# blur the not mask
allBlur = gaussian(im, sigma)
blur = im.copy()
blur[notmask[0], notmask[1], :] = allBlur[notmask[0], notmask[1], :]
return blur
def remove_outliers(image_set, percent=0.1, size=3):
"""
Replaces pixels in the top and bottom percentage of the pixels in each
image with a median pixel value of the pixels in a window of
size by size pixels.
"""
print('Removing outliers...', end=' ')
start = time.time()
try:
rows, cols, num_frames = image_set.shape
except ValueError:
rows, cols = image_set.shape
num_frames = 1
for i in range(num_frames):
if i % 10 == 0:
visualize.update_progress(i / num_frames)
im = image_set[:,:,i]
#Create a median filter
med = filters.median_filter(im, size=size)
# Create the outlier mask
low_outlier = np.percentile(im, percent)
high_outlier = np.percentile(im, 100-percent)
mask = np.zeros((rows,cols), dtype='bool')
mask[im >= high_outlier] = 1
mask[im <= low_outlier] = 1
        # If there are many outliers together, they are probably not outliers.
        # Scale the factor by which to multiply the percent accordingly.
if percent > 9:
pmult = 2
elif percent > 1:
pmult = 3
else:
pmult = 4
low_outlier = np.percentile(im, percent * pmult)
high_outlier = np.percentile(im, 100 - percent * pmult)
mask2 = np.zeros((rows,cols), dtype='bool')
mask2[im >= high_outlier] = 1
mask2[im <= low_outlier] = 1
mask2 = morphology.remove_small_objects(mask2, 12)
mask[mask2==1] = 0
im[mask == 1] = med[mask==1]
print('Done, took', round(time.time() - start, 2), 'seconds.')
return image_set
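# Illustrative sketch (assumed parameters): one way to chain the filters defined in this
# module into a simple cleanup pipeline on a small random stack.
def _example_filter_pipeline():
    frames = np.random.randint(0, 255, size=(256, 256, 8)).astype('uint8')
    frames = remove_outliers(frames, percent=0.1, size=3)
    frames = median(frames, size=3)
    frames = gaussian(frames, sigma=1)
    return normalize(frames)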
| 36.10395
| 103
| 0.572843
|
499c6e4d851ca3c855396bcccf78b9281d8625f1
| 3,843
|
py
|
Python
|
desktop/core/ext-py/sqlalchemy-vertica-python-0.5.8/setup.py
|
aroville/hue
|
63f5f9bcd18f9e76be1983a56137a30cbd96e49d
|
[
"Apache-2.0"
] | 1
|
2021-04-16T19:53:43.000Z
|
2021-04-16T19:53:43.000Z
|
desktop/core/ext-py/sqlalchemy-vertica-python-0.5.8/setup.py
|
aroville/hue
|
63f5f9bcd18f9e76be1983a56137a30cbd96e49d
|
[
"Apache-2.0"
] | null | null | null |
desktop/core/ext-py/sqlalchemy-vertica-python-0.5.8/setup.py
|
aroville/hue
|
63f5f9bcd18f9e76be1983a56137a30cbd96e49d
|
[
"Apache-2.0"
] | 4
|
2020-06-01T06:00:49.000Z
|
2021-01-13T18:16:34.000Z
|
from decimal import Decimal
from distutils.cmd import Command
from setuptools import setup
from setuptools.command.install import install
import os.path
import sys
__version__ = '0.5.8'
# From https://circleci.com/blog/continuously-deploying-python-packages-to-pypi-with-circleci/
class VerifyVersionCommand(install):
"""Custom command to verify that the git tag matches our version"""
description = 'verify that the git tag matches our version'
def run(self):
from setup import __version__
tag = os.getenv('CIRCLE_TAG')
tag_formatted_version = 'v{}'.format(__version__)
if tag != tag_formatted_version:
info = "Git tag: {0} does not match the version of this app: {1}".format(
tag, __version__
)
sys.exit(info)
class CoverageRatchetCommand(Command):
description = 'Run coverage ratchet'
user_options = [] # type: ignore
def finalize_options(self):
pass
def run(self):
"""Run command."""
import xml.etree.ElementTree as ET
tree = ET.parse(self.coverage_source_file)
new_coverage = Decimal(tree.getroot().attrib["line-rate"]) * 100
if not os.path.exists(self.coverage_file):
with open(self.coverage_file, 'w') as f:
f.write('0')
with open(self.coverage_file, 'r') as f:
high_water_mark = Decimal(f.read())
if new_coverage < high_water_mark:
raise Exception(
"{} coverage used to be {}; "
"down to {}%. Fix by viewing '{}'".format(self.type_of_coverage,
high_water_mark,
new_coverage,
self.coverage_url))
elif new_coverage > high_water_mark:
with open(self.coverage_file, 'w') as f:
f.write(str(new_coverage))
print("Just ratcheted coverage up to {}%".format(new_coverage))
else:
print("Code coverage steady at {}%".format(new_coverage))
class TestCoverageRatchetCommand(CoverageRatchetCommand):
def initialize_options(self):
"""Set default values for options."""
self.type_of_coverage = 'Test'
self.coverage_url = 'cover/index.html'
self.coverage_file = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'metrics',
'coverage_high_water_mark'
)
self.coverage_source_file = "coverage.xml"
setup(
name='sqlalchemy-vertica-python',
version=__version__,
description='Vertica dialect for sqlalchemy using vertica_python',
long_description=open("README.rst").read(),
license="MIT",
url='https://github.com/bluelabsio/sqlalchemy-vertica-python',
download_url = 'https://github.com/bluelabsio/sqlalchemy-vertica-python/tarball/{}'.format(__version__),
author='James Casbon, Luke Emery-Fertitta',
maintainer='Vince Broz',
maintainer_email='opensource@bluelabs.com',
packages=[
'sqla_vertica_python',
],
keywords=['sqlalchemy', 'vertica', 'python'],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
entry_points="""
[sqlalchemy.dialects]
vertica.vertica_python = sqla_vertica_python.vertica_python:VerticaDialect
""",
install_requires=[
'vertica_python'
],
cmdclass={
'coverage_ratchet': TestCoverageRatchetCommand,
'verify': VerifyVersionCommand,
},
)
| 34.00885
| 108
| 0.608379
|
606a8b21c1b51288e6d9a34e34efdb66daefdfd2
| 5,252
|
py
|
Python
|
bitbots_bringup/scripts/game_settings.py
|
MosHumanoid/bitbots_misc
|
0435dd2875cde68d625932a4408423958865c502
|
[
"MIT"
] | null | null | null |
bitbots_bringup/scripts/game_settings.py
|
MosHumanoid/bitbots_misc
|
0435dd2875cde68d625932a4408423958865c502
|
[
"MIT"
] | null | null | null |
bitbots_bringup/scripts/game_settings.py
|
MosHumanoid/bitbots_misc
|
0435dd2875cde68d625932a4408423958865c502
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
import sys
import yaml
import rospy
import rospkg
import os
import roslaunch
rospack = rospkg.RosPack()
# every parameter has its own SETTING_PATH
SETTING_PATH = rospack.get_path("bitbots_bringup") + "/config/game_settings.yaml"
TO_BE_SET_PATH = rospack.get_path("bitbots_bringup") + "/config/to_be_set_game_settings.yaml"
def provide_config(path):
"""
reads out the yaml you are asking for with the path parameter
:param path: filepath for your yaml
:return: config as dict
"""
if os.path.exists(path):
        try:
            with open(path, 'r') as f:
                config = yaml.safe_load(f) or {}
        except yaml.YAMLError as exc:
            config = {}
            print("Error in configuration file:", exc)
    else:
        config = {}
        print("The config yaml with path {} does not exist.".format(path))
return config
def ask_for_config_option(name: object, definition: object, current_value: object = None, explanation: object = None) -> object:
"""
:param name: name of the config-option-value e.g. robot number
:param definition: possible options for the value, type of input
:param current_value: the already set value
:param explanation: describes options
:return: new chosen value for this config option, can be the old one
"""
print('=============== {} ==============='.format(name))
print("Options: {}".format(definition))
print("Explanations: {}".format(explanation))
if current_value is not None:
input_prompt = 'Value ({}): '.format(current_value)
else:
input_prompt = 'Value: '
value_is_valid = False
while not value_is_valid:
new_value = input(input_prompt).lower()
if new_value == '':
new_value = current_value
value_is_valid = True
else:
value_is_valid = check_new_value(new_value, definition)
print()
def_type = type(definition[0])
return def_type(new_value)
def check_new_value(new_value: str, definition) -> bool:
"""
checks with definition if new value is a valid input
:param new_value: input to set as new value
:param definition: valid options for new value
:return: true if valid, false if not
"""
if type(definition) is range:
definition = list(definition)
if definition == "custom":
return True
definitiontype = type(definition[0])
try:
        new_value = definitiontype(new_value)  # cast value to the type of the definition entries
    except (ValueError, TypeError):
        print("{} could not be converted to a {}. Are you sure it is in the right format?".format(new_value, definitiontype))
        return False
if type(definition) is list:
if new_value in definition:
return True
else:
# print(new_value, definition)
print(' {} no valid option'.format(new_value))
# print(type(definition[0]))
return False
elif definition is bool:
if new_value == "true" or new_value == "false":
return True
else:
return False
elif definition is int:
try:
int(new_value)
return True
except ValueError:
return False
elif definition is float:
try:
float(new_value)
return True
except ValueError:
return False
elif definition is str:
return True
else:
# We could not validate the type or values so we assume it is incorrect
return False
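# Illustrative sketch (hypothetical option definition, not read from the yaml files):
# how ask_for_config_option and check_new_value above are combined for one setting.
def _example_ask_bot_id():
    return ask_for_config_option('bot_id', [1, 2, 3, 4, 5], current_value=1,
                                 explanation='player number on the field')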
if __name__ == '__main__':
rospy.init_node("game_settings")
config = provide_config(SETTING_PATH)
# every option for a config-value is listed here
'''
options = {
#'playernumber': {'package': 'bla', 'file': 'doc', 'parameter': 'playernumber', 'options': ['1', '2', '3', '4']},
'bot_id': {'package': 'humanoid_league_game_controller', 'file': '/config/game_controller.yaml', 'options': ['1', '2', '3', '4', '5']},
'team_id': {'package': 'humanoid_league_game_controller', 'file': '/config/game_controller.yaml', 'options': ['1', '2', '3', '4', '5']}
}
'''
to_be_set = provide_config(TO_BE_SET_PATH)
for key, value in to_be_set.items():
if key in config.keys():
config[key] = ask_for_config_option(key, to_be_set[key]['options'], config[key], to_be_set[key]['explanation'])
        else:
            config[key] = ask_for_config_option(key, to_be_set[key]['options'],
                                                explanation=to_be_set[key]['explanation'])
with open(SETTING_PATH, 'w') as f:
yaml.dump(config, f, default_flow_style=False)
if len(sys.argv) == 1 or sys.argv[1] != '--no-teamplayer':
start_teamplayer = input("Do you want to launch 'teamplayer.launch'? (y/N)")
if start_teamplayer.lower() == "y":
uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)
roslaunch.configure_logging(uuid)
launch = roslaunch.parent.ROSLaunchParent(uuid, [rospack.get_path("bitbots_bringup") + "/launch/teamplayer.launch"])
launch.start()
| 33.031447
| 143
| 0.616908
|
3ccbd3a0c20a489a69ce5b93ee3c020dfa8bfab7
| 2,044
|
py
|
Python
|
cohesity_management_sdk/models/view_privileges.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | 18
|
2019-09-24T17:35:53.000Z
|
2022-03-25T08:08:47.000Z
|
cohesity_management_sdk/models/view_privileges.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | 18
|
2019-03-29T19:32:29.000Z
|
2022-01-03T23:16:45.000Z
|
cohesity_management_sdk/models/view_privileges.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | 16
|
2019-02-27T06:54:12.000Z
|
2021-11-16T18:10:24.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
class ViewPrivileges(object):
"""Implementation of the 'ViewPrivileges' model.
ViewPrivileges specifies which views are allowed to be accessed by an app
instance.
Attributes:
privileges_type (PrivilegesTypeViewPrivilegesEnum): Specifies if all,
none or specific views are allowed to be accessed.
Specifies if all, none or specific views are allowed to be
accessed.
kNone - None of the views have access.
kAll - All the views have access.
kSpecific - Only specific views have access.
view_ids (list of int): Specifies the ids of the views which are
allowed to be accessed in case the privilege type is kSpecific.
"""
# Create a mapping from Model property names to API property names
_names = {
"privileges_type": 'privilegesType',
"view_ids": 'viewIds'
}
def __init__(self,
privileges_type=None,
view_ids=None):
"""Constructor for the ViewPrivileges class"""
# Initialize members of the class
self.privileges_type = privileges_type
self.view_ids = view_ids
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
privileges_type = dictionary.get('privilegesType', None)
view_ids = dictionary.get('viewIds', None)
# Return an object of this model
return cls(privileges_type,
view_ids)
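if __name__ == "__main__":
    # Illustrative sketch (hypothetical payload): round-trip a server-style dictionary
    # through the model defined above.
    payload = {'privilegesType': 'kSpecific', 'viewIds': [1, 2, 3]}
    privileges = ViewPrivileges.from_dictionary(payload)
    print(privileges.privileges_type, privileges.view_ids)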
| 30.507463
| 81
| 0.624755
|
f907ce92d42a26d992f1a6d46d12bcfd156f3b65
| 4,645
|
py
|
Python
|
env/lib/python3.8/site-packages/unidecode/x088.py
|
avdhari/enigma
|
b7e965a91ca5f0e929c4c719d695f15ccb8b5a2c
|
[
"MIT"
] | 48
|
2021-11-20T08:17:53.000Z
|
2022-03-19T13:57:15.000Z
|
venv/lib/python3.6/site-packages/unidecode/x088.py
|
mrsaicharan1/iiita-updates
|
a22a0157b90d29b946d0f020e5f76744f73a6bff
|
[
"Apache-2.0"
] | 392
|
2015-07-30T14:37:05.000Z
|
2022-03-21T16:56:09.000Z
|
venv/lib/python3.6/site-packages/unidecode/x088.py
|
mrsaicharan1/iiita-updates
|
a22a0157b90d29b946d0f020e5f76744f73a6bff
|
[
"Apache-2.0"
] | 15
|
2015-10-01T21:31:08.000Z
|
2020-05-05T00:03:27.000Z
|
data = (
'Ci ', # 0x00
'Xiang ', # 0x01
'She ', # 0x02
'Luo ', # 0x03
'Qin ', # 0x04
'Ying ', # 0x05
'Chai ', # 0x06
'Li ', # 0x07
'Ze ', # 0x08
'Xuan ', # 0x09
'Lian ', # 0x0a
'Zhu ', # 0x0b
'Ze ', # 0x0c
'Xie ', # 0x0d
'Mang ', # 0x0e
'Xie ', # 0x0f
'Qi ', # 0x10
'Rong ', # 0x11
'Jian ', # 0x12
'Meng ', # 0x13
'Hao ', # 0x14
'Ruan ', # 0x15
'Huo ', # 0x16
'Zhuo ', # 0x17
'Jie ', # 0x18
'Bin ', # 0x19
'He ', # 0x1a
'Mie ', # 0x1b
'Fan ', # 0x1c
'Lei ', # 0x1d
'Jie ', # 0x1e
'La ', # 0x1f
'Mi ', # 0x20
'Li ', # 0x21
'Chun ', # 0x22
'Li ', # 0x23
'Qiu ', # 0x24
'Nie ', # 0x25
'Lu ', # 0x26
'Du ', # 0x27
'Xiao ', # 0x28
'Zhu ', # 0x29
'Long ', # 0x2a
'Li ', # 0x2b
'Long ', # 0x2c
'Feng ', # 0x2d
'Ye ', # 0x2e
'Beng ', # 0x2f
'Shang ', # 0x30
'Gu ', # 0x31
'Juan ', # 0x32
'Ying ', # 0x33
'[?] ', # 0x34
'Xi ', # 0x35
'Can ', # 0x36
'Qu ', # 0x37
'Quan ', # 0x38
'Du ', # 0x39
'Can ', # 0x3a
'Man ', # 0x3b
'Jue ', # 0x3c
'Jie ', # 0x3d
'Zhu ', # 0x3e
'Zha ', # 0x3f
'Xie ', # 0x40
'Huang ', # 0x41
'Niu ', # 0x42
'Pei ', # 0x43
'Nu ', # 0x44
'Xin ', # 0x45
'Zhong ', # 0x46
'Mo ', # 0x47
'Er ', # 0x48
'Ke ', # 0x49
'Mie ', # 0x4a
'Xi ', # 0x4b
'Xing ', # 0x4c
'Yan ', # 0x4d
'Kan ', # 0x4e
'Yuan ', # 0x4f
'[?] ', # 0x50
'Ling ', # 0x51
'Xuan ', # 0x52
'Shu ', # 0x53
'Xian ', # 0x54
'Tong ', # 0x55
'Long ', # 0x56
'Jie ', # 0x57
'Xian ', # 0x58
'Ya ', # 0x59
'Hu ', # 0x5a
'Wei ', # 0x5b
'Dao ', # 0x5c
'Chong ', # 0x5d
'Wei ', # 0x5e
'Dao ', # 0x5f
'Zhun ', # 0x60
'Heng ', # 0x61
'Qu ', # 0x62
'Yi ', # 0x63
'Yi ', # 0x64
'Bu ', # 0x65
'Gan ', # 0x66
'Yu ', # 0x67
'Biao ', # 0x68
'Cha ', # 0x69
'Yi ', # 0x6a
'Shan ', # 0x6b
'Chen ', # 0x6c
'Fu ', # 0x6d
'Gun ', # 0x6e
'Fen ', # 0x6f
'Shuai ', # 0x70
'Jie ', # 0x71
'Na ', # 0x72
'Zhong ', # 0x73
'Dan ', # 0x74
'Ri ', # 0x75
'Zhong ', # 0x76
'Zhong ', # 0x77
'Xie ', # 0x78
'Qi ', # 0x79
'Xie ', # 0x7a
'Ran ', # 0x7b
'Zhi ', # 0x7c
'Ren ', # 0x7d
'Qin ', # 0x7e
'Jin ', # 0x7f
'Jun ', # 0x80
'Yuan ', # 0x81
'Mei ', # 0x82
'Chai ', # 0x83
'Ao ', # 0x84
'Niao ', # 0x85
'Hui ', # 0x86
'Ran ', # 0x87
'Jia ', # 0x88
'Tuo ', # 0x89
'Ling ', # 0x8a
'Dai ', # 0x8b
'Bao ', # 0x8c
'Pao ', # 0x8d
'Yao ', # 0x8e
'Zuo ', # 0x8f
'Bi ', # 0x90
'Shao ', # 0x91
'Tan ', # 0x92
'Ju ', # 0x93
'He ', # 0x94
'Shu ', # 0x95
'Xiu ', # 0x96
'Zhen ', # 0x97
'Yi ', # 0x98
'Pa ', # 0x99
'Bo ', # 0x9a
'Di ', # 0x9b
'Wa ', # 0x9c
'Fu ', # 0x9d
'Gun ', # 0x9e
'Zhi ', # 0x9f
'Zhi ', # 0xa0
'Ran ', # 0xa1
'Pan ', # 0xa2
'Yi ', # 0xa3
'Mao ', # 0xa4
'Tuo ', # 0xa5
'Na ', # 0xa6
'Kou ', # 0xa7
'Xian ', # 0xa8
'Chan ', # 0xa9
'Qu ', # 0xaa
'Bei ', # 0xab
'Gun ', # 0xac
'Xi ', # 0xad
'Ne ', # 0xae
'Bo ', # 0xaf
'Horo ', # 0xb0
'Fu ', # 0xb1
'Yi ', # 0xb2
'Chi ', # 0xb3
'Ku ', # 0xb4
'Ren ', # 0xb5
'Jiang ', # 0xb6
'Jia ', # 0xb7
'Cun ', # 0xb8
'Mo ', # 0xb9
'Jie ', # 0xba
'Er ', # 0xbb
'Luo ', # 0xbc
'Ru ', # 0xbd
'Zhu ', # 0xbe
'Gui ', # 0xbf
'Yin ', # 0xc0
'Cai ', # 0xc1
'Lie ', # 0xc2
'Kamishimo ', # 0xc3
'Yuki ', # 0xc4
'Zhuang ', # 0xc5
'Dang ', # 0xc6
'[?] ', # 0xc7
'Kun ', # 0xc8
'Ken ', # 0xc9
'Niao ', # 0xca
'Shu ', # 0xcb
'Jia ', # 0xcc
'Kun ', # 0xcd
'Cheng ', # 0xce
'Li ', # 0xcf
'Juan ', # 0xd0
'Shen ', # 0xd1
'Pou ', # 0xd2
'Ge ', # 0xd3
'Yi ', # 0xd4
'Yu ', # 0xd5
'Zhen ', # 0xd6
'Liu ', # 0xd7
'Qiu ', # 0xd8
'Qun ', # 0xd9
'Ji ', # 0xda
'Yi ', # 0xdb
'Bu ', # 0xdc
'Zhuang ', # 0xdd
'Shui ', # 0xde
'Sha ', # 0xdf
'Qun ', # 0xe0
'Li ', # 0xe1
'Lian ', # 0xe2
'Lian ', # 0xe3
'Ku ', # 0xe4
'Jian ', # 0xe5
'Fou ', # 0xe6
'Chan ', # 0xe7
'Bi ', # 0xe8
'Gun ', # 0xe9
'Tao ', # 0xea
'Yuan ', # 0xeb
'Ling ', # 0xec
'Chi ', # 0xed
'Chang ', # 0xee
'Chou ', # 0xef
'Duo ', # 0xf0
'Biao ', # 0xf1
'Liang ', # 0xf2
'Chang ', # 0xf3
'Pei ', # 0xf4
'Pei ', # 0xf5
'Fei ', # 0xf6
'Yuan ', # 0xf7
'Luo ', # 0xf8
'Guo ', # 0xf9
'Yan ', # 0xfa
'Du ', # 0xfb
'Xi ', # 0xfc
'Zhi ', # 0xfd
'Ju ', # 0xfe
'Qi ', # 0xff
)
| 17.934363
| 23
| 0.390312
|
f3384e333fd19b76757f60f8b7bdd4f24e44f6b1
| 599
|
py
|
Python
|
fractalis/data/etls/picsure/picsure_handler.py
|
thehyve/Fractalis
|
5591112e5bc994eea5baf3d28caa7e5dfee85a57
|
[
"Apache-2.0"
] | null | null | null |
fractalis/data/etls/picsure/picsure_handler.py
|
thehyve/Fractalis
|
5591112e5bc994eea5baf3d28caa7e5dfee85a57
|
[
"Apache-2.0"
] | 6
|
2018-11-02T10:00:04.000Z
|
2021-09-13T14:15:36.000Z
|
fractalis/data/etls/picsure/picsure_handler.py
|
thehyve/Fractalis
|
5591112e5bc994eea5baf3d28caa7e5dfee85a57
|
[
"Apache-2.0"
] | 1
|
2018-10-22T08:12:00.000Z
|
2018-10-22T08:12:00.000Z
|
"""This module provides PicSureHandler,
an implementation of ETLHandler for PIC-SURE."""
import logging
from fractalis.data.etlhandler import ETLHandler
from fractalis.data_services_config import Handler
logger = logging.getLogger(__name__)
class PicSureHandler(ETLHandler):
"""This ETLHandler provides integration with PIC-SURE."""
_handler = Handler.PICSURE
@staticmethod
def make_label(descriptor: dict) -> str:
return descriptor['query']['select'][0]['alias']
def _get_token_for_credentials(self, server: str, auth: dict) -> str:
return auth['token']
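if __name__ == "__main__":
    # Illustrative sketch (hypothetical descriptor): the minimal shape make_label above
    # expects -- a query whose first select entry carries an alias.
    descriptor = {'query': {'select': [{'alias': 'age'}]}}
    print(PicSureHandler.make_label(descriptor))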
| 26.043478
| 73
| 0.734558
|
3c72e0f0c29405a4894579907a1a630552a70adf
| 8,859
|
py
|
Python
|
torch_ecg/models/rr_lstm.py
|
wenh06/torch_ecg
|
a260bac0b4be8cd84c411874af3337358f135442
|
[
"MIT"
] | 7
|
2020-09-29T09:29:18.000Z
|
2021-04-13T14:22:57.000Z
|
torch_ecg/models/rr_lstm.py
|
wenh06/torch_ecg
|
a260bac0b4be8cd84c411874af3337358f135442
|
[
"MIT"
] | null | null | null |
torch_ecg/models/rr_lstm.py
|
wenh06/torch_ecg
|
a260bac0b4be8cd84c411874af3337358f135442
|
[
"MIT"
] | 2
|
2021-04-28T03:13:11.000Z
|
2021-05-15T14:15:34.000Z
|
"""
AF (and perhaps other arrhythmias like preamature beats) detection
using rr time series as input and using lstm as model
References
----------
[1] https://github.com/al3xsh/rnn-based-af-detection
"""
from copy import deepcopy
from itertools import repeat
from collections import OrderedDict
from typing import Union, Optional, Tuple, Sequence, NoReturn, Any
from numbers import Real, Number
import numpy as np
np.set_printoptions(precision=5, suppress=True)
import pandas as pd
import torch
from torch import nn
from torch import Tensor
import torch.nn.functional as F
from easydict import EasyDict as ED
from ..cfg import DEFAULTS
from ..model_configs.rr_lstm import RR_LSTM_CONFIG
from ..utils.misc import dict_to_str
from ..utils.utils_nn import compute_module_size, SizeMixin, CkptMixin
from ..models._nets import (
Mish, Swish, Activations,
NonLocalBlock, SEBlock, GlobalContextBlock,
StackedLSTM,
AttentionWithContext,
SelfAttention, MultiHeadAttention,
AttentivePooling,
SeqLin,
CRF, ExtendedCRF,
)
if DEFAULTS.torch_dtype == torch.float64:
torch.set_default_tensor_type(torch.DoubleTensor)
__all__ = [
"RR_LSTM",
]
class RR_LSTM(CkptMixin, SizeMixin, nn.Module):
"""
classification or sequence labeling using LSTM and using RR intervals as input
"""
__DEBUG__ = True
__name__ = "RR_LSTM"
def __init__(self, classes:Sequence[str], config:Optional[ED]=None, **kwargs:Any) -> NoReturn:
""" finished, checked,
Parameters
----------
classes: list,
list of the classes for classification
config: dict, optional,
other hyper-parameters, including kernel sizes, etc.
ref. the corresponding config file
"""
super().__init__()
self.classes = list(classes)
self.n_classes = len(classes)
self.config = deepcopy(RR_LSTM_CONFIG)
self.config.update(deepcopy(config) or {})
if self.__DEBUG__:
print(f"classes (totally {self.n_classes}) for prediction:{self.classes}")
print(f"configuration of {self.__name__} is as follows\n{dict_to_str(self.config)}")
self.lstm = StackedLSTM(
input_size=1,
hidden_sizes=self.config.lstm.hidden_sizes,
bias=self.config.lstm.bias,
dropouts=self.config.lstm.dropouts,
bidirectional=self.config.lstm.bidirectional,
return_sequences=self.config.lstm.retseq,
)
if self.__DEBUG__:
print(f"\042lstm\042 module has size {self.lstm.module_size}")
attn_input_size = self.lstm.compute_output_shape(None, None)[-1]
if not self.config.lstm.retseq:
self.attn = None
elif self.config.attn.name.lower() == "none":
self.attn = None
clf_input_size = attn_input_size
elif self.config.attn.name.lower() == "nl": # non_local
self.attn = NonLocalBlock(
in_channels=attn_input_size,
filter_lengths=self.config.attn.nl.filter_lengths,
subsample_length=self.config.attn.nl.subsample_length,
batch_norm=self.config.attn.nl.batch_norm,
)
clf_input_size = self.attn.compute_output_shape(None, None)[1]
elif self.config.attn.name.lower() == "se": # squeeze_exitation
self.attn = SEBlock(
in_channels=attn_input_size,
reduction=self.config.attn.se.reduction,
activation=self.config.attn.se.activation,
kw_activation=self.config.attn.se.kw_activation,
bias=self.config.attn.se.bias,
)
clf_input_size = self.attn.compute_output_shape(None, None)[1]
elif self.config.attn.name.lower() == "gc": # global_context
self.attn = GlobalContextBlock(
in_channels=attn_input_size,
ratio=self.config.attn.gc.ratio,
reduction=self.config.attn.gc.reduction,
pooling_type=self.config.attn.gc.pooling_type,
fusion_types=self.config.attn.gc.fusion_types,
)
clf_input_size = self.attn.compute_output_shape(None, None)[1]
elif self.config.attn.name.lower() == "sa": # self_attention
# NOTE: this branch NOT tested
self.attn = SelfAttention(
in_features=attn_input_size,
head_num=self.config.attn.sa.head_num,
dropout=self.config.attn.sa.dropout,
bias=self.config.attn.sa.bias,
)
clf_input_size = self.attn.compute_output_shape(None, None)[-1]
else:
raise NotImplementedError
if self.__DEBUG__ and self.attn:
print(f"attn module \042{self.config.attn.name}\042 has size {self.attn.module_size}")
if not self.config.lstm.retseq:
self.pool = None
self.clf = None
elif self.config.clf.name.lower() == "linear":
if self.config.global_pool.lower() == "max":
self.pool = nn.AdaptiveMaxPool1d((1,))
elif self.config.global_pool.lower() == "none":
self.pool = None
self.clf = SeqLin(
in_channels=clf_input_size,
out_channels=self.config.clf.linear.out_channels + [self.n_classes],
activation=self.config.clf.linear.activation,
bias=self.config.clf.linear.bias,
dropouts=self.config.clf.linear.dropouts,
skip_last_activation=True,
)
elif self.config.clf.name.lower() == "crf":
self.pool = None
self.clf = ExtendedCRF(
in_channels=clf_input_size,
num_tags=self.n_classes,
bias=self.config.clf.crf.proj_bias
)
if self.__DEBUG__ and self.clf:
print(f"clf module \042{self.config.clf.name}\042 has size {self.clf.module_size}")
# for inference, except for crf
self.softmax = nn.Softmax(dim=-1)
self.sigmoid = nn.Sigmoid()
def forward(self, input:Tensor) -> Tensor:
""" finished, checked,
Parameters
----------
input: Tensor,
of shape (seq_len, batch_size, n_channels)
Returns
-------
output: Tensor,
of shape (batch_size, seq_len, n_classes) or (batch_size, n_classes)
"""
# (batch_size, n_channels, seq_len) --> (seq_len, batch_size, n_channels)
# x = input.permute(1,2,0)
x = self.lstm(input) # (seq_len, batch_size, n_channels) or (batch_size, n_channels)
if self.attn:
# (seq_len, batch_size, n_channels) --> (batch_size, n_channels, seq_len)
x = x.permute(1,2,0)
x = self.attn(x) # (batch_size, n_channels, seq_len)
elif x.ndim == 3:
# (seq_len, batch_size, n_channels) --> (batch_size, n_channels, seq_len)
x = x.permute(1,2,0)
if self.pool:
x = self.pool(x) # (batch_size, n_channels, 1)
x = x.squeeze(dim=-1) # (batch_size, n_channels)
elif x.ndim == 3:
x = x.permute(0,2,1) # (batch_size, n_channels, seq_len) --> (batch_size, seq_len, n_channels)
else:
# x of shape (batch_size, n_channels),
# in the case where config.lstm.retseq = False
pass
if self.config.clf.name.lower() == "linear":
x = self.clf(x) # (batch_size, seq_len, n_classes) or (batch_size, n_classes)
elif self.config.clf.name.lower() == "crf":
x = self.clf(x) # (batch_size, seq_len, n_classes)
output = x
return output
@torch.no_grad()
def inference(self, input:Tensor, bin_pred_thr:float=0.5) -> Tensor:
"""
"""
raise NotImplementedError("implement a task specific inference method")
def compute_output_shape(self, seq_len:Optional[int]=None, batch_size:Optional[int]=None) -> Sequence[Union[int, None]]:
""" finished, checked,
Parameters
----------
seq_len: int, optional,
length of the 1d sequence,
if is None, then the input is composed of single feature vectors for each batch
batch_size: int, optional,
the batch size, can be None
Returns
-------
output_shape: sequence,
the output shape of this model, given `seq_len` and `batch_size`
"""
if self.config.clf.name.lower() == "crf":
output_shape = (batch_size, seq_len, self.n_classes)
else:
# clf is "linear" or lstm.retseq is False
output_shape = (batch_size, self.n_classes)
return output_shape
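if __name__ == "__main__":
    # Illustrative sketch (assumes the default RR_LSTM_CONFIG is usable as-is): feed a
    # dummy RR-interval series of shape (seq_len, batch_size, 1) through the model above.
    model = RR_LSTM(classes=["N", "AF"])
    rr = torch.randn(64, 2, 1)
    print(model(rr).shape)
    print(model.compute_output_shape(seq_len=64, batch_size=2))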
| 37.858974
| 124
| 0.603454
|
4c0cc9727616e422a857f962abbd0890fc58618b
| 1,619
|
py
|
Python
|
copydf/copydf.py
|
alonnir/copydf
|
dd5a460e010fc7acdfeb51251a611fb11dcf7e9d
|
[
"MIT"
] | null | null | null |
copydf/copydf.py
|
alonnir/copydf
|
dd5a460e010fc7acdfeb51251a611fb11dcf7e9d
|
[
"MIT"
] | 1
|
2022-03-13T23:36:26.000Z
|
2022-03-13T23:36:26.000Z
|
copydf/copydf.py
|
alonnir/copydf
|
dd5a460e010fc7acdfeb51251a611fb11dcf7e9d
|
[
"MIT"
] | 2
|
2021-04-18T12:06:00.000Z
|
2022-03-05T11:19:13.000Z
|
from IPython import get_ipython
import re
def copyDF( df ):
'''
A function that copies a dataframe to your clipboard when run in Jupyter.
Args:
* df (``pandas.DataFrame``): a dataframe to copy to the local clipboard
Returns:
None
'''
ipy = get_ipython()
ipy.run_cell_magic( "javascript", "",
'''
function copyToClipboard(text) {
if ( window.clipboardData && window.clipboardData.setData ) { return clipboardData.setData( "Text", text ); }
else if ( document.queryCommandSupported && document.queryCommandSupported( "copy" ) ) {
var textarea = document.createElement( "textarea" );
textarea.textContent = text;
textarea.style.position = "fixed"; // Prevent scrolling to bottom of page in Microsoft Edge.
document.body.appendChild( textarea );
textarea.select();
try { return document.execCommand( "copy" ); }
catch ( ex ) {
console.warn( "Copy to clipboard failed.", ex );
return false;
}
finally { document.body.removeChild( textarea ); }
}
};
copyToClipboard( "%s" );
''' % ( "{}{}".format(
"\t{}\\n".format( "\t".join( [ str( df.columns[ c ] ) for c in range( len( df.columns ) ) ] ) ),
"\\n".join( [ "{}\t{}".format( str( df.index[ r ] ), "\t".join( [ str( df.iloc[ r, c ] ) for c in range( len( df.columns ) ) ] ) ) for r in range( len( df ) ) ] )
) ) )
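if __name__ == "__main__":
    # Illustrative sketch (assumes a running Jupyter/IPython kernel, since copyDF relies
    # on the javascript cell magic): copy a tiny DataFrame to the clipboard.
    import pandas as pd
    df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
    copyDF(df)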
| 42.605263
| 178
| 0.519456
|
29ca4ad6ff31597505881747800701ef1bf9f751
| 4,423
|
py
|
Python
|
Betsy/Betsy/modules/index_reference_rsem.py
|
jefftc/changlab
|
11da8c415afefcba0b0216238387c75aeb3a56ac
|
[
"MIT"
] | 9
|
2017-01-13T02:38:41.000Z
|
2021-04-08T00:44:39.000Z
|
Betsy/Betsy/modules/index_reference_rsem.py
|
jefftc/changlab
|
11da8c415afefcba0b0216238387c75aeb3a56ac
|
[
"MIT"
] | null | null | null |
Betsy/Betsy/modules/index_reference_rsem.py
|
jefftc/changlab
|
11da8c415afefcba0b0216238387c75aeb3a56ac
|
[
"MIT"
] | 4
|
2017-01-05T16:25:25.000Z
|
2019-12-12T20:07:38.000Z
|
from Module import AbstractModule
class Module(AbstractModule):
def __init__(self):
AbstractModule.__init__(self)
def run(
self, network, antecedents, out_attributes, user_options, num_cores,
out_path):
import os
import shutil
#from genomicode import config
from genomicode import filelib
from genomicode import parallel
from genomicode import alignlib
from Betsy import module_utils as mlib
ref_node, gene_node = antecedents
# Don't copy the whole path. Just get the fasta file.
#ref = alignlib.standardize_reference_genome(
# ref_node.identifier, out_path, use_symlinks=True)
ref = alignlib.create_reference_genome(ref_node.identifier)
gtf_file = gene_node.identifier
filelib.assert_exists_nz(gtf_file)
# Symlink the fasta file into the out path.
filelib.safe_mkdir(out_path)
x = os.path.join(out_path, ref.fasta_file)
os.symlink(ref.fasta_file_full, x)
# rsem-prepare-reference --bowtie --bowtie2 --gtf gtf02.gtf
# <reference.fa> <reference_name>
# <reference_name>.[1234].ebwt # Bowtie1.
# <reference_name>.rev.[12].ebwt
# <reference_name>.[1234].bt2 # Bowtie2.
# <reference_name>.rev.[12].bt2
# <reference_name>.chrlist # RSEM.
# <reference_name>.grp
# <reference_name>.idx.fa
# <reference_name>.n2g.idx.fa
# <reference_name>.seq
# <reference_name>.ti
# <reference_name>.transcripts.fa
# chrLength.txt # STAR
# chrNameLength.txt
# chrName.txt
# chrStart.txt
# exonGeTrInfo.tab
# exonInfo.tab
# gencode.vM8.annotation.gtf
# geneInfo.tab
# Genome
# genomeParameters.txt
# SA
# SAindex
# sjdbInfo.txt
# sjdbList.fromGTF.out.tab
# sjdbList.out.tab
# transcriptInfo.tab
rsem_prepare = mlib.get_config("rsem_prepare", which_assert_file=True)
bowtie = mlib.get_config("bowtie", which_assert_file=True)
bowtie2 = mlib.get_config("bowtie2", which_assert_file=True)
STAR = mlib.get_config("STAR", which_assert_file=True)
# RSEM wants the path that contains the executables.
bowtie = os.path.split(bowtie)[0]
bowtie2 = os.path.split(bowtie2)[0]
STAR = os.path.split(STAR)[0]
sq = parallel.quote
cmd = [
sq(rsem_prepare),
"--num-threads", num_cores,
"--bowtie",
"--bowtie-path", sq(bowtie),
"--bowtie2",
"--bowtie2-path", sq(bowtie2),
"--star",
"--star-path", sq(STAR),
"--gtf", sq(gtf_file),
sq(ref.fasta_file_full),
ref.name,
]
parallel.sshell(cmd, path=out_path)
# Copy the GTF file into the output path.
shutil.copy2(gtf_file, out_path)
assembly = ref.name
# Check to make sure index was created successfully.
x1 = ["%s.%d.ebwt" % (assembly, i+1) for i in range(4)]
x2 = ["%s.rev.%d.ebwt" % (assembly, i+1) for i in range(2)]
x3 = ["%s.%d.bt2" % (assembly, i+1) for i in range(4)]
x4 = ["%s.rev.%d.bt2" % (assembly, i+1) for i in range(2)]
x5 = [
"%s.chrlist" % assembly,
"%s.grp" % assembly,
"%s.idx.fa" % assembly,
"%s.n2g.idx.fa" % assembly,
"%s.seq" % assembly,
"%s.ti" % assembly,
"%s.transcripts.fa" % assembly,
]
x6 = [
"chrLength.txt",
"chrNameLength.txt",
"chrName.txt",
"chrStart.txt",
"exonGeTrInfo.tab",
"exonInfo.tab",
"gencode.vM8.annotation.gtf",
"geneInfo.tab",
"Genome",
"genomeParameters.txt",
"SA",
"SAindex",
"sjdbInfo.txt",
"sjdbList.fromGTF.out.tab",
"sjdbList.out.tab",
"transcriptInfo.tab",
]
x = x1 + x2 + x3 + x4 + x5 + x6
        index_files = [os.path.join(out_path, f) for f in x]
filelib.assert_exists_nz_many(index_files)
def name_outfile(self, antecedents, user_options):
return "reference.rsem"
| 34.023077
| 78
| 0.549627
|
335334ad65c5879939ec1b08c97e4dc6b7d84e76
| 7,229
|
py
|
Python
|
mittab/urls.py
|
yoitsdave/mit-tab
|
e0c2bb845cf816291c5db624281d98f54ff4d263
|
[
"MIT"
] | null | null | null |
mittab/urls.py
|
yoitsdave/mit-tab
|
e0c2bb845cf816291c5db624281d98f54ff4d263
|
[
"MIT"
] | null | null | null |
mittab/urls.py
|
yoitsdave/mit-tab
|
e0c2bb845cf816291c5db624281d98f54ff4d263
|
[
"MIT"
] | null | null | null |
from django.views import i18n
from django.conf.urls import include, url
from django.contrib.auth.views import logout
from django.conf import settings
import mittab.apps.tab.views as views
import mittab.apps.tab.judge_views as judge_views
import mittab.apps.tab.team_views as team_views
import mittab.apps.tab.debater_views as debater_views
import mittab.apps.tab.pairing_views as pairing_views
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls), name='admin'),
url(r'^dynamic-media/jsi18n/$', i18n.javascript_catalog, name='js18'),
url(r'^$', views.index, name='index'),
url(r'^403/', views.render_403, name='403'),
# TODO: named urls
# Account related
url(r'^accounts/login/$', views.tab_login, name='tab_login'),
url(r'^accounts/logout/$', logout, name='logout'),
# Judge related
url(r'^judge/(\d+)/$', judge_views.view_judge, name='view_judge'),
url(r'^judge/(\d+)/delete/$', judge_views.delete_judge, name='delete_judge'),
url(r'^judge/(\d+)/scratches/add/(\d+)/', judge_views.add_scratches,
name='add_scratches'),
url(r'^judge/(\d+)/scratches/view/', judge_views.view_scratches,
name='view_scratches'),
url(r'^judge/(\d+)/check_ins/round/(\d+)/$', judge_views.judge_check_in,
name='judge_check_in'),
url(r'^view_judges/$', judge_views.view_judges, name='view_judges'),
url(r'^enter_judge/$', judge_views.enter_judge, name='enter_judge'),
url(r'^batch_checkin/$', judge_views.batch_checkin, name='batch_checkin'),
# School related
url(r'^school/(\d+)/$', views.view_school, name='view_school'),
url(r'^school/(\d+)/delete/$', views.delete_school, name='delete_school'),
url(r'^view_schools/$', views.view_schools, name='view_schools'),
url(r'^enter_school/$', views.enter_school, name='enter_school'),
# Room related
url(r'^room/(\d+)/$', views.view_room, name='view_room'),
url(r'^room/(\d+)/delete/$', views.delete_room, name='delete_room'),
url(r'^view_rooms/$', views.view_rooms, name='view_rooms'),
url(r'^enter_room/$', views.enter_room, name='enter_room'),
# Scratch related
url(r'^judge/(\d+)/scratches/delete/(\d+)/', views.delete_scratch,
name='delete_scratch_judge'),
url(r'^team/(\d+)/scratches/delete/(\d+)/', views.delete_scratch,
name='delete_scratch_team'),
url(r'^scratches/view/', views.view_scratches, name='view_scratches'),
url(r'^enter_scratch/', views.add_scratch, name='add_scratch'),
# Team related
url(r'^team/(\d+)/$', team_views.view_team, name='view_team'),
url(r'^team/(\d+)/delete/$', team_views.delete_team, name='delete_team'),
url(r'^team/(\d+)/scratches/add/(\d+)/', team_views.add_scratches,
name='add_scratches'),
url(r'^team/(\d+)/scratches/view/', team_views.view_scratches,
name='view_scratches_team'),
url(r'^team/(\d+)/stats/', team_views.team_stats, name='team_stats'),
url(r'^view_teams/$', team_views.view_teams, name='view_teams'),
url(r'^enter_team/$', team_views.enter_team, name='enter_team'),
url(r'^all_tab_cards/$', team_views.all_tab_cards, name='all_tab_cards'),
url(r'^team/card/(\d+)/$', team_views.tab_card, name='tab_card'),
url(r'^team/card/(\d+)/pretty/$', team_views.pretty_tab_card,
name='pretty_tab_card'),
url(r'^team/ranking/$', team_views.rank_teams_ajax, name='rank_teams_ajax'),
url(r'^team/rank/$', team_views.rank_teams, name='rank_teams'),
# Debater related
url(r'^debater/(\d+)/$', debater_views.view_debater, name='view_debater'),
url(r'^debater/(\d+)/delete/$', debater_views.delete_debater,
name='delete_debater'),
url(r'^view_debaters/$', debater_views.view_debaters, name='view_debaters'),
url(r'^enter_debater/$', debater_views.enter_debater, name='enter_debater'),
url(r'^debater/ranking/$', debater_views.rank_debaters_ajax,
name='rank_debaters_ajax'),
url(r'^debater/rank/$', debater_views.rank_debaters, name='rank_debaters'),
# Pairing related
url(r'^pairings/status/$', pairing_views.view_status, name='view_status'),
url(r'^pairings/view_rounds/$', pairing_views.view_rounds, name='view_rounds'),
url(r'^round/(\d+)/$', pairing_views.view_round, name='view_round'),
url(r'^round/(\d+)/result/$', pairing_views.enter_result, name='enter_result'),
url(r'^round/(\d+)/result/(\d+)/$', pairing_views.enter_multiple_results,
name='enter_multiple_results'),
url(r'^round/(\d+)/alternative_judges/(\d+)/$',
pairing_views.alternative_judges, name='round_alternative_judges'),
url(r'^round/(\d+)/alternative_judges/$', pairing_views.alternative_judges,
name='alternative_judges'),
url(r'^round/(\d+)/assign_judge/(\d+)/$', pairing_views.assign_judge,
name='assign_judge'),
url(r'^round/(\d+)/assign_judge/(\d+)/(\d+)/$',
pairing_views.assign_judge, name='swap_judge'),
url(r'^pairing/pair_round/$', pairing_views.pair_round, name='pair_round'),
url(r'^pairing/assign_judges/$', pairing_views.assign_judges_to_pairing,
name='assign_judges'),
url(r'^pairing/confirm_start_tourny/$',
pairing_views.confirm_start_new_tourny, name='confirm_start_tourny'),
url(r'^pairing/start_tourny/$', pairing_views.start_new_tourny,
name='start_tourny'),
url(r'^pairings/pairinglist/$', pairing_views.pretty_pair, name='pretty_pair'),
url(r'^pairings/missing_ballots/$', pairing_views.missing_ballots,
name='missing_ballots'),
url(r'^pairings/pairinglist/printable/$', pairing_views.pretty_pair_print,
name='pretty_pair_print'),
url(r'^pairing/backup/$', pairing_views.manual_backup, name='manual_backup'),
url(r'^pairing/release/$', pairing_views.toggle_pairing_released,
name='toggle_pairing_released'),
url(r'^pairing/view_backups/$', pairing_views.view_backups,
name='view_backups'),
url(r'^pairings/swap/(\d+)/(\d+)/with/(\d+)/(\d+)/$',
pairing_views.swap_judges_in_round, name='swap_judges_in_round'),
url(r'^pairings/swap_team/(\d+)/(\d+)/with/(\d+)/(\d+)/$',
pairing_views.swap_teams_in_round, name='swap_teams_in_round'),
url(r'^e_ballots/$', pairing_views.e_ballot_search, name='e_ballot_search'),
url(r'e_ballots/(\S+)/$', pairing_views.enter_e_ballot, name='enter_e_ballot'),
# Backups
url(r'^backup/restore/(.+)/$', pairing_views.restore_backup,
name='restore_backup'),
url(r'^backup/download/(.+)/$', pairing_views.download_backup,
name='download_backup'),
url(r'^backup/(.+)/$', pairing_views.view_backup, name='view_backup'),
url(r'^upload_backup/$', pairing_views.upload_backup, name='upload_backup'),
# Data Upload
url(r'^import_data/$', views.upload_data, name='upload_data'),
# Publicly accessible personal pages
url(r'^public_status/(\d+)/$', team_views.public_status, name='public_status'),
]
| 50.908451
| 83
| 0.67409
|
2a30e25d5fe9c18e235797b1bd19ab7ca2c34287
| 503
|
py
|
Python
|
testkit/backend.py
|
AndyHeap-NeoTech/neo4j-dotnet-driver
|
ea19f49c6d09b824bef1fce86d66e92140a13dc5
|
[
"Apache-2.0"
] | 1
|
2020-04-08T08:02:06.000Z
|
2020-04-08T08:02:06.000Z
|
testkit/backend.py
|
AndyHeap-NeoTech/neo4j-dotnet-driver
|
ea19f49c6d09b824bef1fce86d66e92140a13dc5
|
[
"Apache-2.0"
] | null | null | null |
testkit/backend.py
|
AndyHeap-NeoTech/neo4j-dotnet-driver
|
ea19f49c6d09b824bef1fce86d66e92140a13dc5
|
[
"Apache-2.0"
] | 1
|
2021-04-09T13:42:03.000Z
|
2021-04-09T13:42:03.000Z
|
"""
Executed in dotnet driver container.
Assumes the driver and backend have been built.
Responsible for starting the test backend.
"""
import os, subprocess, sys
if __name__ == "__main__":
backend_path = os.path.join(
"bin", "Publish", "Neo4j.Driver.Tests.TestBackend.dll"
)
logfile_path = os.path.join("..", "artifacts", "backend.log")
subprocess.check_call(
["dotnet", backend_path, "0.0.0.0", "9876", logfile_path],
stdout=sys.stdout, stderr=sys.stderr
)
| 23.952381
| 66
| 0.66004
|
fa9e0e5b10c152932c396e180da5d05301f21551
| 389
|
py
|
Python
|
jtyoui/data/Enums.py
|
vanton/Jtyoui
|
c44d66b038ac5f4e2d75b68b3493d02f7b7b385e
|
[
"MIT"
] | 2
|
2019-11-06T01:47:17.000Z
|
2019-11-06T01:48:19.000Z
|
jtyoui/data/Enums.py
|
liangxioa/Jtyoui
|
5a584cbf12d644b6c4fb13167d8841a383afbbac
|
[
"MIT"
] | null | null | null |
jtyoui/data/Enums.py
|
liangxioa/Jtyoui
|
5a584cbf12d644b6c4fb13167d8841a383afbbac
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.7
# -*- coding: utf-8 -*-
# @Time : 2019/4/29 15:08
# @Author: Jtyoui@qq.com
from enum import Enum
class Languages(Enum):
"""英语单词对应中文"""
English = '英语'
Japanese = '日语'
Korean = '韩语'
French = '法语'
German = '德语'
Spanish = '西班牙语'
Chinese = '汉语'
if __name__ == '__main__':
print(type(Languages.English))
print(Languages.English)
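    # Illustrative addition (not part of the original file): Enum members expose
    # .name and .value; here .value holds the Chinese label defined above.
    print(Languages.English.name)   # -> English
    print(Languages.English.value)  # -> 英语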
| 17.681818
| 34
| 0.583548
|
1de2f8a6845eb094ec18e25ad3d21a1e102f1fa8
| 326
|
py
|
Python
|
Python-Exercise-100/python-exercise-example72.py
|
MiracleWong/PythonPractice
|
40aecd84045ad18f6aff95d5b8be8e352ca0a726
|
[
"MIT"
] | null | null | null |
Python-Exercise-100/python-exercise-example72.py
|
MiracleWong/PythonPractice
|
40aecd84045ad18f6aff95d5b8be8e352ca0a726
|
[
"MIT"
] | null | null | null |
Python-Exercise-100/python-exercise-example72.py
|
MiracleWong/PythonPractice
|
40aecd84045ad18f6aff95d5b8be8e352ca0a726
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Content:
# URL: http://www.runoob.com/python/python-exercise-example72.html
# Topic: Python linked lists
if __name__ == "__main__":
ptr = []
for i in range(5):
num = int(input("please input a number:\n"))
ptr.append(num)
print(ptr)
    # TODO: Is a plain list appropriate to use as a linked list? (see the sketch below)
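# Illustrative sketch (not part of the original exercise): the TODO above asks
# whether a plain list is really a linked list. A Python list is array-backed,
# so a minimal node-based singly linked list would look roughly like this; the
# names Node and build_linked_list are hypothetical.
class Node:
    def __init__(self, value, next_node=None):
        self.value = value
        self.next = next_node
def build_linked_list(values):
    """Build a singly linked list from an iterable and return its head node."""
    head = None
    for v in reversed(list(values)):
        head = Node(v, head)
    return head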
| 18.111111
| 65
| 0.601227
|
5c0189254064ee56c95d47514157c33692bb03a9
| 489
|
py
|
Python
|
tests/test_app.py
|
JeremiahNgige/pitch-app
|
2fc0e49f50cb2f936930ec4214b87afc830962c8
|
[
"MIT"
] | null | null | null |
tests/test_app.py
|
JeremiahNgige/pitch-app
|
2fc0e49f50cb2f936930ec4214b87afc830962c8
|
[
"MIT"
] | null | null | null |
tests/test_app.py
|
JeremiahNgige/pitch-app
|
2fc0e49f50cb2f936930ec4214b87afc830962c8
|
[
"MIT"
] | null | null | null |
import unittest
from app.models import User, Post, Comment
class PitchTest(unittest.TestCase):
def setUp(self):
self.new_user = User(username='victor', email='bkafrika144@gmail.com', password='tyrrelhaslay718')
self.new_post = Post()
self.new_comment = Comment()
def test_user_instance(self):
pass
def test_post_instance(self):
pass
def test_comment_instance(self):
pass
if __name__ == '__main__':
unittest.main()
| 21.26087
| 106
| 0.664622
|
f0d9befdc93778e60609ad4f3324a1c4d49b8744
| 6,148
|
py
|
Python
|
carbondesign/tags/tooltip.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tags/tooltip.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tags/tooltip.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
"""
Tooltip
=======
See: https://www.carbondesignsystem.com/components/tooltip/usage/
Tooltips display additional information upon click, hover, or focus. The
information should be contextual, useful, and nonessential.
Overview
--------
A tooltip is a message box that is displayed when a user hovers over, clicks
or gives focus to a UI element such as an icon, a highlighted word, or a button.
""" # pylint:disable=line-too-long
# pylint:disable=too-many-lines
from .base import Node, modify_svg
class BaseTooltip(Node):
"""Base tooltip.
"""
WANT_CHILDREN = True
"Template Tag needs closing end tag."
NODE_PROPS = ('id', 'align', 'position')
"Extended Template Tag arguments."
POSSIBLE_ALIGN = ('start', 'center', 'end')
"Documentation only."
POSSIBLE_POSITION = ('top', 'right', 'bottom', 'left')
"Documentation only."
def prepare(self, values, context):
"""Prepare values for rendering the templates.
"""
align = self.eval(self.kwargs.get('align'), context)
if align and align in self.POSSIBLE_ALIGN:
values['class'].append(f'bx--tooltip--align-{align}')
position = self.eval(self.kwargs.get('position'), context)
if position and position in self.POSSIBLE_POSITION:
values['class'].append(f'bx--tooltip--{position}')
def render_slot_icon(self, values, context):
"""Render html of the slot.
"""
return modify_svg(values['child'], {
'focusable': 'false',
'preserveAspectRatio': 'xMidYMid meet',
'fill': 'currentColor',
'style': {
'width': '%spx' % 16,
'height': '%spx' % 16,
},
'aria-hidden': 'true',
})
class Interactive(BaseTooltip):
"""Tooltip component.
"""
SLOTS = ('icon', 'footer', 'heading')
"Named children."
MODES = ('interactive', 'nolabel')
"Available variants."
REQUIRED_PROPS = ('label',)
"Will raise Exception if not set."
CLASS_AND_PROPS = ('label', 'content')
"Prepare xxx_class and xxx_props values."
def prepare(self, values, context):
"""Prepare values for rendering the templates.
"""
if 'heading' in self.slots:
values['content_props'].append(
('aria-labelledby', 'heading-' + self._id))
elif self.mode == 'nolabel':
values['content_props'].append(('aria-label',
values['label'] + values['label_suffix']))
else:
values['content_props'].append(
('aria-labelledby', 'label-' + self._id))
def render_interactive(self, values, context):
"""Output html of the component.
"""
template = """
<div id="label-{id}" class="bx--tooltip__label">
{label}{label_suffix}
<button aria-expanded="false" aria-labelledby="label-{id}"
data-tooltip-trigger data-tooltip-target="#{id}"
class="bx--tooltip__trigger {class}" aria-controls="{id}" {props}>
{slot_icon}
</button>
</div>
<div id="{id}" aria-hidden="true" data-floating-menu-direction="bottom"
class="bx--tooltip">
<span class="bx--tooltip__caret"></span>
<div class="bx--tooltip__content {content_class}" tabindex="-1" role="dialog"
aria-describedby="body-{id}" {content_props}>
{slot_heading}
<p id="body-{id}">{child}</p>
{slot_footer}
</div>
<span tabindex="0"></span>
</div>
"""
return self.format(template, values, context)
def render_nolabel(self, values, context):
"""Output html of the component.
"""
template = """
<div id="label-{id}" class="bx--tooltip__label">
{label}{label_suffix}
<div tabindex="0" aria-expanded="false" aria-labelledby="label-{id}"
data-tooltip-trigger data-tooltip-target="#{id}"
role="button" class="bx--tooltip__trigger {class}" aria-controls="{id}" {props}>
{slot_icon}
</div>
</div>
<div id="{id}" aria-hidden="true" data-floating-menu-direction="bottom"
class="bx--tooltip">
<span class="bx--tooltip__caret"></span>
<div class="bx--tooltip__content {content_class}" tabindex="-1" role="dialog"
aria-describedby="body-{id}" {content_props}>
{slot_heading}
<p id="body-{id}">{child}</p>
{slot_footer}
</div>
<span tabindex="0"></span>
</div>
"""
return self.format(template, values, context)
def render_slot_heading(self, values, context):
"""Render html of the slot.
"""
template = """
<h4 id="heading-{id}" class="bx--tooltip__heading {class}" {props}>{child}</h4>
"""
return self.format(template, values)
def render_slot_footer(self, values, context):
"""Render html of the slot.
"""
template = """
<div class="bx--tooltip__footer {class}" {props}>
{child}
</div>
"""
return self.format(template, values)
class Definition(BaseTooltip):
"""Tooltip component.
"""
REQUIRED_PROPS = ('label',)
"Will raise Exception if not set."
def render_default(self, values, context):
"""Output html of the component.
"""
template = """
<div class="bx--tooltip--definition bx--tooltip--a11y" data-tooltip-definition>
<button aria-describedby="{id}"
class="bx--tooltip__trigger bx--tooltip--a11y bx--tooltip__trigger--definition {class}"
{props}>
{label}{label_suffix}
</button>
<div class="bx--assistive-text" id="{id}" role="tooltip">
{child}
</div>
</div>
"""
return self.format(template, values)
class Icon(BaseTooltip):
"""Tooltip component.
"""
SLOTS = ('icon',)
"Named children."
def render_default(self, values, context):
"""Output html of the component.
"""
template = """
<button class="bx--tooltip__trigger bx--tooltip--a11y {class}"
data-tooltip-icon {props}>
<span class="bx--assistive-text">{child}</span>
{slot_icon}
</button>
"""
return self.format(template, values, context)
components = {
'InteractiveTooltip': Interactive,
'DefinitionTooltip': Definition,
'IconTooltip': Icon,
}
| 29.700483
| 93
| 0.610117
|
edac343ba431ee82c4dee94a707c9788382b0cde
| 370,684
|
py
|
Python
|
template_container_ferret/labels/slice_71.py
|
lkondratova/Brainplot
|
3c8a88c1995dedeaa5cbd88ee71499c7cf9c571d
|
[
"MIT"
] | null | null | null |
template_container_ferret/labels/slice_71.py
|
lkondratova/Brainplot
|
3c8a88c1995dedeaa5cbd88ee71499c7cf9c571d
|
[
"MIT"
] | null | null | null |
template_container_ferret/labels/slice_71.py
|
lkondratova/Brainplot
|
3c8a88c1995dedeaa5cbd88ee71499c7cf9c571d
|
[
"MIT"
] | null | null | null |
coordinates_993300 = ((222, 20),
(222, 22), (222, 23), (222, 24), (222, 25), (222, 26), (222, 27), (222, 28), (222, 29), (222, 30), (222, 31), (222, 32), (222, 33), (222, 34), (222, 35), (222, 37), (222, 50), (222, 52), (222, 53), (222, 54), (222, 55), (222, 56), (222, 57), (222, 58), (222, 59), (222, 60), (222, 61), (222, 62), (222, 63), (222, 64), (222, 65), (222, 66), (222, 67), (222, 68), (222, 69), (222, 70), (222, 71), (222, 72), (222, 73), (222, 74), (222, 75), (222, 76), (222, 77), (222, 78), (222, 80), (223, 20), (223, 22), (223, 23), (223, 24), (223, 25), (223, 26), (223, 27), (223, 28), (223, 29), (223, 30), (223, 31), (223, 32), (223, 33), (223, 34), (223, 35), (223, 37), (223, 51), (223, 53), (223, 54), (223, 55), (223, 56), (223, 57), (223, 58), (223, 59), (223, 60), (223, 61), (223, 62), (223, 63),
(223, 64), (223, 65), (223, 66), (223, 67), (223, 68), (223, 69), (223, 70), (223, 71), (223, 72), (223, 73), (223, 74), (223, 75), (223, 76), (223, 77), (223, 78), (223, 80), )
coordinates_F45DF4 = ((120, 113),
(121, 110), (121, 113), (122, 112), (122, 114), (123, 109), (123, 111), (123, 112), (123, 114), (123, 171), (123, 172), (124, 108), (124, 110), (124, 111), (124, 112), (124, 113), (124, 114), (124, 115), (124, 166), (124, 167), (124, 168), (124, 169), (124, 170), (124, 171), (124, 172), (124, 173), (124, 174), (124, 175), (124, 176), (124, 177), (124, 178), (125, 108), (125, 110), (125, 111), (125, 112), (125, 113), (125, 115), (125, 162), (125, 163), (125, 165), (125, 176), (125, 177), (125, 180), (125, 182), (126, 107), (126, 109), (126, 110), (126, 111), (126, 112), (126, 113), (126, 115), (126, 160), (126, 163), (126, 179), (126, 183), (126, 185), (127, 107), (127, 109), (127, 110), (127, 111), (127, 112), (127, 113), (127, 115), (127, 158), (127, 162), (127, 181), (127, 186), (127, 187), (127, 188), (128, 107), (128, 109), (128, 110), (128, 111),
(128, 112), (128, 113), (128, 114), (128, 116), (128, 155), (128, 156), (128, 161), (128, 183), (128, 190), (129, 106), (129, 108), (129, 109), (129, 110), (129, 111), (129, 112), (129, 113), (129, 114), (129, 116), (129, 153), (129, 154), (129, 157), (129, 158), (129, 160), (129, 185), (129, 192), (130, 106), (130, 108), (130, 109), (130, 110), (130, 111), (130, 112), (130, 113), (130, 114), (130, 116), (130, 152), (130, 155), (130, 156), (130, 157), (130, 159), (130, 188), (130, 189), (130, 190), (130, 191), (130, 193), (131, 105), (131, 106), (131, 107), (131, 108), (131, 109), (131, 110), (131, 111), (131, 112), (131, 113), (131, 114), (131, 115), (131, 117), (131, 150), (131, 153), (131, 154), (131, 155), (131, 156), (131, 158), (132, 105), (132, 107), (132, 108), (132, 109), (132, 110), (132, 111), (132, 112), (132, 113), (132, 114), (132, 115),
(132, 117), (132, 146), (132, 147), (132, 148), (132, 151), (132, 152), (132, 153), (132, 154), (132, 155), (132, 157), (133, 105), (133, 107), (133, 108), (133, 109), (133, 110), (133, 111), (133, 112), (133, 113), (133, 114), (133, 115), (133, 116), (133, 118), (133, 142), (133, 144), (133, 145), (133, 149), (133, 150), (133, 151), (133, 152), (133, 153), (133, 154), (133, 155), (133, 157), (134, 104), (134, 106), (134, 107), (134, 108), (134, 109), (134, 110), (134, 111), (134, 112), (134, 113), (134, 114), (134, 115), (134, 116), (134, 117), (134, 119), (134, 140), (134, 145), (134, 146), (134, 147), (134, 148), (134, 149), (134, 150), (134, 151), (134, 152), (134, 153), (134, 154), (134, 156), (135, 104), (135, 106), (135, 107), (135, 108), (135, 109), (135, 110), (135, 111), (135, 112), (135, 113), (135, 114), (135, 115), (135, 116), (135, 117),
(135, 118), (135, 120), (135, 139), (135, 142), (135, 143), (135, 144), (135, 145), (135, 146), (135, 147), (135, 148), (135, 149), (135, 150), (135, 151), (135, 152), (135, 153), (135, 154), (136, 61), (136, 63), (136, 64), (136, 104), (136, 106), (136, 107), (136, 108), (136, 109), (136, 110), (136, 111), (136, 112), (136, 113), (136, 114), (136, 115), (136, 116), (136, 117), (136, 118), (136, 119), (136, 122), (136, 137), (136, 140), (136, 141), (136, 142), (136, 143), (136, 144), (136, 145), (136, 146), (136, 147), (136, 148), (136, 149), (136, 150), (136, 151), (136, 152), (136, 153), (136, 155), (137, 61), (137, 65), (137, 103), (137, 105), (137, 106), (137, 107), (137, 108), (137, 109), (137, 110), (137, 111), (137, 112), (137, 113), (137, 114), (137, 115), (137, 116), (137, 117), (137, 118), (137, 119), (137, 120), (137, 124), (137, 125),
(137, 135), (137, 136), (137, 139), (137, 140), (137, 141), (137, 142), (137, 143), (137, 144), (137, 145), (137, 146), (137, 147), (137, 148), (137, 149), (137, 150), (137, 151), (137, 152), (137, 154), (138, 61), (138, 63), (138, 64), (138, 67), (138, 103), (138, 105), (138, 106), (138, 107), (138, 108), (138, 109), (138, 110), (138, 111), (138, 112), (138, 113), (138, 114), (138, 115), (138, 116), (138, 117), (138, 118), (138, 119), (138, 120), (138, 121), (138, 122), (138, 123), (138, 126), (138, 127), (138, 128), (138, 129), (138, 130), (138, 131), (138, 132), (138, 133), (138, 134), (138, 137), (138, 138), (138, 139), (138, 140), (138, 141), (138, 142), (138, 143), (138, 144), (138, 145), (138, 146), (138, 147), (138, 148), (138, 149), (138, 150), (138, 151), (138, 153), (139, 61), (139, 63), (139, 64), (139, 65), (139, 68), (139, 102),
(139, 103), (139, 104), (139, 105), (139, 106), (139, 107), (139, 108), (139, 109), (139, 110), (139, 111), (139, 112), (139, 113), (139, 114), (139, 115), (139, 116), (139, 117), (139, 118), (139, 119), (139, 120), (139, 121), (139, 122), (139, 123), (139, 124), (139, 125), (139, 135), (139, 136), (139, 137), (139, 138), (139, 139), (139, 140), (139, 141), (139, 142), (139, 143), (139, 144), (139, 145), (139, 146), (139, 147), (139, 148), (139, 149), (139, 150), (139, 152), (140, 61), (140, 63), (140, 64), (140, 65), (140, 66), (140, 67), (140, 69), (140, 102), (140, 104), (140, 105), (140, 106), (140, 107), (140, 108), (140, 109), (140, 110), (140, 111), (140, 112), (140, 113), (140, 114), (140, 115), (140, 116), (140, 117), (140, 118), (140, 119), (140, 120), (140, 121), (140, 122), (140, 123), (140, 124), (140, 125), (140, 126), (140, 127),
(140, 128), (140, 129), (140, 130), (140, 131), (140, 132), (140, 133), (140, 134), (140, 135), (140, 136), (140, 137), (140, 138), (140, 139), (140, 140), (140, 141), (140, 142), (140, 143), (140, 144), (140, 145), (140, 146), (140, 147), (140, 148), (140, 149), (141, 62), (141, 64), (141, 65), (141, 66), (141, 67), (141, 68), (141, 71), (141, 102), (141, 103), (141, 104), (141, 105), (141, 106), (141, 107), (141, 108), (141, 109), (141, 110), (141, 111), (141, 112), (141, 113), (141, 114), (141, 115), (141, 116), (141, 117), (141, 118), (141, 119), (141, 120), (141, 121), (141, 122), (141, 123), (141, 124), (141, 125), (141, 126), (141, 127), (141, 128), (141, 129), (141, 130), (141, 131), (141, 132), (141, 133), (141, 134), (141, 135), (141, 136), (141, 137), (141, 138), (141, 139), (141, 140), (141, 141), (141, 142), (141, 143), (141, 144),
(141, 145), (141, 146), (141, 147), (141, 148), (141, 149), (141, 151), (142, 62), (142, 64), (142, 65), (142, 66), (142, 67), (142, 68), (142, 69), (142, 72), (142, 101), (142, 103), (142, 104), (142, 105), (142, 106), (142, 107), (142, 108), (142, 109), (142, 110), (142, 111), (142, 112), (142, 113), (142, 114), (142, 115), (142, 116), (142, 117), (142, 118), (142, 119), (142, 120), (142, 121), (142, 122), (142, 123), (142, 124), (142, 125), (142, 126), (142, 127), (142, 128), (142, 129), (142, 130), (142, 131), (142, 132), (142, 133), (142, 134), (142, 135), (142, 136), (142, 137), (142, 138), (142, 139), (142, 140), (142, 141), (142, 142), (142, 143), (142, 144), (142, 145), (142, 146), (142, 147), (142, 148), (142, 150), (143, 63), (143, 65), (143, 66), (143, 67), (143, 68), (143, 69), (143, 70), (143, 73), (143, 100), (143, 102),
(143, 103), (143, 104), (143, 105), (143, 106), (143, 107), (143, 108), (143, 109), (143, 110), (143, 111), (143, 112), (143, 113), (143, 114), (143, 115), (143, 116), (143, 117), (143, 118), (143, 119), (143, 120), (143, 121), (143, 122), (143, 123), (143, 124), (143, 125), (143, 126), (143, 127), (143, 128), (143, 129), (143, 130), (143, 131), (143, 132), (143, 133), (143, 134), (143, 135), (143, 136), (143, 137), (143, 138), (143, 139), (143, 140), (143, 141), (143, 142), (143, 143), (143, 144), (143, 145), (143, 146), (143, 147), (143, 149), (144, 64), (144, 67), (144, 68), (144, 69), (144, 70), (144, 71), (144, 72), (144, 74), (144, 100), (144, 102), (144, 103), (144, 104), (144, 105), (144, 106), (144, 107), (144, 108), (144, 109), (144, 110), (144, 111), (144, 112), (144, 113), (144, 114), (144, 115), (144, 116), (144, 117), (144, 118),
(144, 119), (144, 120), (144, 121), (144, 122), (144, 123), (144, 124), (144, 125), (144, 126), (144, 127), (144, 128), (144, 129), (144, 130), (144, 131), (144, 132), (144, 133), (144, 134), (144, 135), (144, 136), (144, 137), (144, 138), (144, 139), (144, 140), (144, 141), (144, 142), (144, 143), (144, 144), (144, 145), (144, 146), (144, 148), (145, 65), (145, 68), (145, 69), (145, 70), (145, 71), (145, 72), (145, 73), (145, 75), (145, 99), (145, 101), (145, 102), (145, 103), (145, 104), (145, 105), (145, 106), (145, 107), (145, 108), (145, 109), (145, 110), (145, 111), (145, 112), (145, 113), (145, 114), (145, 115), (145, 116), (145, 117), (145, 118), (145, 119), (145, 120), (145, 121), (145, 122), (145, 123), (145, 124), (145, 125), (145, 126), (145, 127), (145, 128), (145, 129), (145, 130), (145, 131), (145, 132), (145, 140), (145, 141),
(145, 142), (145, 143), (145, 144), (145, 145), (145, 146), (145, 148), (146, 66), (146, 69), (146, 70), (146, 71), (146, 72), (146, 73), (146, 74), (146, 76), (146, 97), (146, 100), (146, 101), (146, 102), (146, 103), (146, 104), (146, 105), (146, 106), (146, 107), (146, 108), (146, 109), (146, 110), (146, 111), (146, 112), (146, 113), (146, 114), (146, 115), (146, 116), (146, 117), (146, 118), (146, 119), (146, 120), (146, 121), (146, 122), (146, 123), (146, 124), (146, 125), (146, 126), (146, 127), (146, 128), (146, 129), (146, 130), (146, 133), (146, 134), (146, 135), (146, 136), (146, 137), (146, 138), (146, 139), (146, 143), (146, 144), (146, 145), (146, 147), (147, 68), (147, 70), (147, 71), (147, 72), (147, 73), (147, 74), (147, 75), (147, 78), (147, 95), (147, 99), (147, 100), (147, 101), (147, 102), (147, 103), (147, 104),
(147, 105), (147, 106), (147, 107), (147, 108), (147, 109), (147, 110), (147, 111), (147, 112), (147, 113), (147, 114), (147, 115), (147, 116), (147, 117), (147, 118), (147, 119), (147, 120), (147, 121), (147, 122), (147, 123), (147, 124), (147, 125), (147, 126), (147, 127), (147, 128), (147, 129), (147, 131), (147, 132), (147, 140), (147, 142), (147, 147), (148, 69), (148, 71), (148, 72), (148, 73), (148, 74), (148, 75), (148, 76), (148, 79), (148, 95), (148, 97), (148, 98), (148, 99), (148, 100), (148, 101), (148, 102), (148, 103), (148, 104), (148, 105), (148, 106), (148, 107), (148, 108), (148, 109), (148, 110), (148, 111), (148, 112), (148, 113), (148, 114), (148, 115), (148, 116), (148, 117), (148, 118), (148, 119), (148, 120), (148, 121), (148, 122), (148, 123), (148, 124), (148, 125), (148, 126), (148, 127), (148, 130), (148, 143),
(148, 144), (148, 146), (149, 70), (149, 72), (149, 73), (149, 74), (149, 75), (149, 76), (149, 77), (149, 80), (149, 94), (149, 96), (149, 97), (149, 98), (149, 99), (149, 100), (149, 101), (149, 102), (149, 103), (149, 104), (149, 105), (149, 106), (149, 107), (149, 108), (149, 109), (149, 110), (149, 111), (149, 112), (149, 113), (149, 114), (149, 115), (149, 116), (149, 117), (149, 118), (149, 119), (149, 120), (149, 121), (149, 122), (149, 123), (149, 124), (149, 125), (149, 126), (149, 127), (149, 129), (150, 71), (150, 73), (150, 74), (150, 75), (150, 76), (150, 77), (150, 78), (150, 79), (150, 81), (150, 90), (150, 92), (150, 95), (150, 96), (150, 97), (150, 98), (150, 99), (150, 100), (150, 101), (150, 102), (150, 103), (150, 104), (150, 105), (150, 106), (150, 107), (150, 108), (150, 109), (150, 110), (150, 111),
(150, 112), (150, 113), (150, 114), (150, 115), (150, 116), (150, 117), (150, 118), (150, 119), (150, 120), (150, 121), (150, 122), (150, 123), (150, 124), (150, 125), (150, 126), (150, 128), (151, 72), (151, 74), (151, 75), (151, 76), (151, 77), (151, 78), (151, 79), (151, 80), (151, 82), (151, 89), (151, 94), (151, 95), (151, 96), (151, 97), (151, 98), (151, 99), (151, 100), (151, 101), (151, 102), (151, 103), (151, 104), (151, 105), (151, 106), (151, 107), (151, 108), (151, 109), (151, 110), (151, 111), (151, 112), (151, 113), (151, 114), (151, 115), (151, 116), (151, 117), (151, 118), (151, 119), (151, 120), (151, 121), (151, 122), (151, 123), (151, 124), (151, 125), (151, 127), (152, 72), (152, 74), (152, 75), (152, 76), (152, 77), (152, 78), (152, 79), (152, 80), (152, 81), (152, 84), (152, 88), (152, 90), (152, 91),
(152, 92), (152, 93), (152, 94), (152, 95), (152, 96), (152, 97), (152, 98), (152, 99), (152, 100), (152, 101), (152, 102), (152, 103), (152, 104), (152, 105), (152, 106), (152, 107), (152, 108), (152, 109), (152, 110), (152, 111), (152, 112), (152, 113), (152, 114), (152, 115), (152, 116), (152, 117), (152, 118), (152, 119), (152, 120), (152, 121), (152, 122), (152, 123), (152, 124), (152, 126), (153, 73), (153, 75), (153, 76), (153, 77), (153, 78), (153, 79), (153, 80), (153, 81), (153, 82), (153, 85), (153, 86), (153, 89), (153, 90), (153, 91), (153, 92), (153, 93), (153, 94), (153, 95), (153, 96), (153, 97), (153, 98), (153, 99), (153, 100), (153, 101), (153, 102), (153, 103), (153, 104), (153, 105), (153, 106), (153, 107), (153, 108), (153, 109), (153, 110), (153, 111), (153, 112), (153, 113), (153, 114), (153, 115),
(153, 116), (153, 117), (153, 118), (153, 119), (153, 120), (153, 121), (153, 122), (153, 123), (153, 124), (153, 126), (154, 73), (154, 75), (154, 76), (154, 77), (154, 78), (154, 79), (154, 80), (154, 81), (154, 82), (154, 83), (154, 84), (154, 88), (154, 89), (154, 90), (154, 91), (154, 92), (154, 93), (154, 94), (154, 95), (154, 96), (154, 97), (154, 98), (154, 99), (154, 100), (154, 101), (154, 102), (154, 103), (154, 104), (154, 105), (154, 106), (154, 107), (154, 108), (154, 109), (154, 110), (154, 111), (154, 112), (154, 113), (154, 114), (154, 115), (154, 116), (154, 117), (154, 118), (154, 119), (154, 120), (154, 121), (154, 122), (154, 123), (154, 125), (155, 73), (155, 75), (155, 76), (155, 77), (155, 78), (155, 79), (155, 80), (155, 81), (155, 82), (155, 83), (155, 84), (155, 85), (155, 86), (155, 87),
(155, 88), (155, 89), (155, 90), (155, 91), (155, 92), (155, 93), (155, 94), (155, 95), (155, 96), (155, 97), (155, 98), (155, 99), (155, 100), (155, 101), (155, 102), (155, 103), (155, 104), (155, 105), (155, 106), (155, 107), (155, 108), (155, 109), (155, 110), (155, 111), (155, 112), (155, 113), (155, 114), (155, 115), (155, 116), (155, 117), (155, 118), (155, 119), (155, 120), (155, 121), (155, 122), (155, 123), (155, 125), (156, 73), (156, 75), (156, 76), (156, 77), (156, 78), (156, 79), (156, 80), (156, 81), (156, 82), (156, 83), (156, 84), (156, 85), (156, 86), (156, 87), (156, 88), (156, 89), (156, 90), (156, 91), (156, 92), (156, 93), (156, 94), (156, 95), (156, 96), (156, 97), (156, 98), (156, 99), (156, 100), (156, 101), (156, 102), (156, 103), (156, 104), (156, 105), (156, 106), (156, 107), (156, 108),
(156, 109), (156, 110), (156, 111), (156, 112), (156, 113), (156, 114), (156, 115), (156, 116), (156, 117), (156, 118), (156, 119), (156, 120), (156, 121), (156, 122), (156, 124), (157, 73), (157, 75), (157, 76), (157, 77), (157, 78), (157, 79), (157, 80), (157, 81), (157, 82), (157, 83), (157, 84), (157, 85), (157, 86), (157, 87), (157, 88), (157, 89), (157, 90), (157, 91), (157, 92), (157, 93), (157, 94), (157, 95), (157, 96), (157, 97), (157, 98), (157, 99), (157, 100), (157, 101), (157, 102), (157, 103), (157, 104), (157, 105), (157, 106), (157, 107), (157, 108), (157, 109), (157, 110), (157, 111), (157, 112), (157, 113), (157, 114), (157, 115), (157, 116), (157, 117), (157, 118), (157, 119), (157, 120), (157, 121), (157, 122), (157, 124), (158, 73), (158, 75), (158, 76), (158, 77), (158, 78), (158, 79), (158, 80),
(158, 81), (158, 82), (158, 83), (158, 84), (158, 85), (158, 86), (158, 87), (158, 88), (158, 89), (158, 90), (158, 91), (158, 92), (158, 93), (158, 94), (158, 95), (158, 96), (158, 97), (158, 98), (158, 99), (158, 100), (158, 101), (158, 102), (158, 103), (158, 104), (158, 105), (158, 106), (158, 107), (158, 108), (158, 109), (158, 110), (158, 111), (158, 112), (158, 113), (158, 114), (158, 115), (158, 116), (158, 117), (158, 118), (158, 119), (158, 120), (158, 121), (158, 122), (158, 123), (158, 125), (159, 73), (159, 75), (159, 76), (159, 77), (159, 78), (159, 79), (159, 80), (159, 81), (159, 82), (159, 83), (159, 84), (159, 85), (159, 86), (159, 87), (159, 88), (159, 89), (159, 90), (159, 91), (159, 92), (159, 93), (159, 94), (159, 95), (159, 96), (159, 97), (159, 98), (159, 99), (159, 100), (159, 101),
(159, 102), (159, 103), (159, 104), (159, 105), (159, 106), (159, 107), (159, 108), (159, 109), (159, 110), (159, 111), (159, 112), (159, 113), (159, 114), (159, 115), (159, 116), (159, 117), (159, 118), (159, 119), (159, 127), (159, 128), (159, 129), (159, 130), (159, 131), (159, 133), (160, 73), (160, 75), (160, 76), (160, 77), (160, 78), (160, 79), (160, 80), (160, 81), (160, 82), (160, 83), (160, 84), (160, 85), (160, 86), (160, 87), (160, 88), (160, 89), (160, 90), (160, 91), (160, 92), (160, 93), (160, 94), (160, 95), (160, 96), (160, 97), (160, 98), (160, 99), (160, 100), (160, 101), (160, 102), (160, 103), (160, 104), (160, 105), (160, 106), (160, 107), (160, 113), (160, 114), (160, 115), (160, 116), (160, 117), (160, 118), (160, 121), (160, 122), (160, 123), (160, 124), (160, 125), (160, 126), (160, 127), (160, 128),
(160, 129), (160, 130), (160, 131), (160, 133), (161, 73), (161, 75), (161, 76), (161, 77), (161, 78), (161, 79), (161, 80), (161, 81), (161, 82), (161, 83), (161, 84), (161, 85), (161, 86), (161, 87), (161, 88), (161, 89), (161, 90), (161, 91), (161, 92), (161, 93), (161, 94), (161, 95), (161, 96), (161, 97), (161, 98), (161, 99), (161, 100), (161, 101), (161, 102), (161, 103), (161, 104), (161, 105), (161, 106), (161, 109), (161, 110), (161, 111), (161, 112), (161, 116), (161, 117), (161, 119), (162, 73), (162, 75), (162, 76), (162, 77), (162, 78), (162, 79), (162, 80), (162, 81), (162, 82), (162, 83), (162, 84), (162, 85), (162, 86), (162, 87), (162, 88), (162, 89), (162, 90), (162, 91), (162, 92), (162, 93), (162, 94), (162, 95), (162, 96), (162, 97), (162, 98), (162, 99), (162, 100), (162, 101),
(162, 102), (162, 103), (162, 104), (162, 105), (162, 107), (162, 114), (162, 115), (162, 118), (163, 73), (163, 75), (163, 76), (163, 77), (163, 78), (163, 79), (163, 80), (163, 81), (163, 82), (163, 83), (163, 84), (163, 85), (163, 86), (163, 87), (163, 88), (163, 89), (163, 90), (163, 91), (163, 92), (163, 93), (163, 94), (163, 95), (163, 96), (163, 97), (163, 98), (163, 99), (163, 100), (163, 101), (163, 102), (163, 103), (163, 104), (163, 106), (163, 116), (163, 117), (164, 73), (164, 75), (164, 76), (164, 77), (164, 78), (164, 79), (164, 80), (164, 81), (164, 82), (164, 83), (164, 84), (164, 85), (164, 86), (164, 87), (164, 88), (164, 89), (164, 90), (164, 91), (164, 92), (164, 93), (164, 94), (164, 95), (164, 96), (164, 97), (164, 98), (164, 99), (164, 100), (164, 101), (164, 102), (164, 103),
(164, 104), (164, 106), (165, 73), (165, 75), (165, 76), (165, 77), (165, 78), (165, 79), (165, 80), (165, 81), (165, 82), (165, 83), (165, 84), (165, 85), (165, 86), (165, 87), (165, 88), (165, 89), (165, 90), (165, 91), (165, 92), (165, 93), (165, 94), (165, 95), (165, 96), (165, 97), (165, 98), (165, 99), (165, 100), (165, 101), (165, 102), (165, 103), (165, 105), (166, 73), (166, 75), (166, 76), (166, 77), (166, 78), (166, 79), (166, 80), (166, 81), (166, 82), (166, 83), (166, 84), (166, 85), (166, 86), (166, 87), (166, 88), (166, 89), (166, 90), (166, 91), (166, 92), (166, 93), (166, 94), (166, 95), (166, 96), (166, 97), (166, 98), (166, 99), (166, 100), (166, 101), (166, 102), (166, 103), (166, 105), (167, 73), (167, 75), (167, 76), (167, 77), (167, 78), (167, 79), (167, 80), (167, 81),
(167, 82), (167, 83), (167, 84), (167, 85), (167, 86), (167, 87), (167, 88), (167, 89), (167, 90), (167, 91), (167, 92), (167, 93), (167, 94), (167, 95), (167, 96), (167, 97), (167, 98), (167, 99), (167, 100), (167, 101), (167, 102), (167, 103), (167, 105), (168, 73), (168, 75), (168, 76), (168, 77), (168, 78), (168, 79), (168, 80), (168, 81), (168, 82), (168, 83), (168, 84), (168, 85), (168, 86), (168, 87), (168, 88), (168, 89), (168, 90), (168, 91), (168, 92), (168, 93), (168, 94), (168, 95), (168, 96), (168, 97), (168, 98), (168, 99), (168, 100), (168, 101), (168, 102), (168, 104), (169, 73), (169, 75), (169, 76), (169, 77), (169, 78), (169, 79), (169, 80), (169, 81), (169, 82), (169, 83), (169, 84), (169, 85), (169, 86), (169, 87), (169, 88), (169, 89), (169, 90), (169, 91), (169, 92),
(169, 93), (169, 94), (169, 95), (169, 96), (169, 97), (169, 98), (169, 99), (169, 100), (169, 101), (169, 103), (170, 73), (170, 75), (170, 76), (170, 77), (170, 78), (170, 79), (170, 80), (170, 81), (170, 82), (170, 83), (170, 84), (170, 85), (170, 86), (170, 87), (170, 88), (170, 89), (170, 90), (170, 91), (170, 92), (170, 93), (170, 94), (170, 95), (170, 96), (170, 97), (170, 98), (170, 99), (170, 100), (170, 102), (171, 73), (171, 75), (171, 76), (171, 77), (171, 78), (171, 79), (171, 80), (171, 81), (171, 82), (171, 83), (171, 84), (171, 85), (171, 86), (171, 87), (171, 88), (171, 89), (171, 90), (171, 91), (171, 92), (171, 93), (171, 94), (171, 97), (171, 98), (171, 99), (171, 101), (172, 73), (172, 75), (172, 76), (172, 77), (172, 78), (172, 79), (172, 80), (172, 81), (172, 82),
(172, 83), (172, 84), (172, 85), (172, 86), (172, 89), (172, 90), (172, 91), (172, 92), (172, 93), (172, 98), (172, 100), (173, 73), (173, 75), (173, 76), (173, 77), (173, 78), (173, 79), (173, 80), (173, 81), (173, 82), (173, 83), (173, 84), (173, 87), (173, 88), (173, 92), (173, 93), (173, 94), (173, 97), (173, 99), (174, 72), (174, 74), (174, 75), (174, 76), (174, 77), (174, 78), (174, 79), (174, 80), (174, 81), (174, 82), (174, 83), (174, 84), (174, 86), (174, 89), (174, 90), (174, 91), (174, 93), (174, 98), (175, 72), (175, 74), (175, 75), (175, 76), (175, 77), (175, 78), (175, 79), (175, 80), (175, 81), (175, 82), (175, 83), (175, 84), (175, 93), (176, 72), (176, 74), (176, 75), (176, 76), (176, 77), (176, 78), (176, 79), (176, 80), (176, 81), (176, 82), (176, 84), (177, 71),
(177, 73), (177, 74), (177, 75), (177, 76), (177, 77), (177, 78), (177, 79), (177, 80), (177, 81), (177, 82), (177, 84), (178, 71), (178, 73), (178, 74), (178, 75), (178, 76), (178, 77), (178, 78), (178, 79), (178, 80), (178, 81), (178, 83), (179, 70), (179, 72), (179, 73), (179, 74), (179, 75), (179, 76), (179, 77), (179, 78), (179, 79), (179, 80), (179, 81), (179, 83), (180, 69), (180, 71), (180, 72), (180, 73), (180, 74), (180, 75), (180, 76), (180, 77), (180, 78), (180, 79), (180, 80), (180, 81), (180, 83), (181, 68), (181, 70), (181, 71), (181, 72), (181, 73), (181, 74), (181, 75), (181, 76), (181, 77), (181, 78), (181, 79), (181, 80), (181, 81), (181, 83), (182, 67), (182, 69), (182, 70), (182, 71), (182, 72), (182, 73), (182, 74), (182, 75), (182, 76), (182, 77), (182, 78),
(182, 79), (182, 80), (182, 81), (182, 83), (183, 65), (183, 68), (183, 69), (183, 70), (183, 71), (183, 72), (183, 73), (183, 74), (183, 75), (183, 76), (183, 77), (183, 78), (183, 79), (183, 80), (183, 82), (184, 64), (184, 67), (184, 68), (184, 69), (184, 70), (184, 71), (184, 72), (184, 73), (184, 74), (184, 75), (184, 76), (184, 77), (184, 78), (184, 79), (184, 80), (184, 82), (185, 62), (185, 65), (185, 66), (185, 67), (185, 68), (185, 69), (185, 70), (185, 71), (185, 72), (185, 73), (185, 74), (185, 75), (185, 76), (185, 77), (185, 78), (185, 79), (185, 81), (186, 59), (186, 60), (186, 64), (186, 65), (186, 66), (186, 67), (186, 68), (186, 69), (186, 78), (186, 79), (186, 81), (187, 56), (187, 57), (187, 58), (187, 61), (187, 62), (187, 63), (187, 64), (187, 65), (187, 66),
(187, 67), (187, 68), (187, 70), (187, 71), (187, 72), (187, 73), (187, 74), (187, 75), (187, 76), (187, 79), (187, 80), (188, 53), (188, 54), (188, 55), (188, 59), (188, 60), (188, 61), (188, 62), (188, 63), (188, 64), (188, 65), (188, 66), (188, 67), (188, 69), (188, 78), (188, 80), (189, 50), (189, 51), (189, 52), (189, 56), (189, 57), (189, 58), (189, 59), (189, 60), (189, 61), (189, 62), (189, 63), (189, 64), (189, 65), (189, 66), (189, 68), (189, 79), (190, 47), (190, 48), (190, 53), (190, 54), (190, 55), (190, 56), (190, 57), (190, 58), (190, 59), (190, 60), (190, 61), (190, 62), (190, 63), (190, 64), (190, 65), (190, 66), (190, 68), (190, 79), (191, 44), (191, 45), (191, 49), (191, 50), (191, 51), (191, 52), (191, 53), (191, 54), (191, 55), (191, 56), (191, 57), (191, 58),
(191, 59), (191, 60), (191, 61), (191, 62), (191, 63), (191, 64), (191, 65), (191, 66), (191, 67), (191, 69), (192, 36), (192, 38), (192, 39), (192, 40), (192, 41), (192, 42), (192, 43), (192, 47), (192, 48), (192, 49), (192, 50), (192, 51), (192, 52), (192, 70), (193, 35), (193, 44), (193, 45), (193, 46), (193, 47), (193, 48), (193, 49), (193, 53), (193, 54), (193, 55), (193, 56), (193, 57), (193, 58), (193, 59), (193, 60), (193, 61), (193, 62), (193, 63), (193, 64), (193, 65), (193, 66), (193, 67), (193, 68), (193, 70), (194, 34), (194, 36), (194, 37), (194, 38), (194, 39), (194, 40), (194, 41), (194, 42), (194, 43), (194, 44), (194, 45), (194, 46), (194, 47), (194, 50), (194, 51), (194, 52), (195, 33), (195, 35), (195, 36), (195, 37), (195, 38), (195, 39), (195, 40), (195, 41),
(195, 42), (195, 43), (195, 44), (195, 48), (195, 49), (196, 32), (196, 34), (196, 35), (196, 36), (196, 37), (196, 38), (196, 39), (196, 40), (196, 41), (196, 42), (196, 46), (197, 32), (197, 34), (197, 35), (197, 36), (197, 37), (197, 38), (197, 39), (197, 40), (197, 44), (198, 31), (198, 33), (198, 34), (198, 35), (198, 36), (198, 37), (198, 38), (198, 42), (199, 31), (199, 33), (199, 34), (199, 35), (199, 36), (199, 40), (200, 32), (200, 33), (200, 34), (200, 38), (201, 31), (201, 36), (202, 32), (202, 34), (246, 36), (246, 38), (247, 34), (247, 39), (247, 41), (248, 34), (248, 36), (248, 37), (248, 38), (248, 42), (248, 43), (248, 44), (249, 33), (249, 36), (249, 37), (249, 38), (249, 39), (249, 40), (249, 41), (249, 45), (249, 46), (249, 47), (250, 34), (250, 39), (250, 40),
(250, 41), (250, 42), (250, 43), (250, 44), (250, 48), (250, 49), (251, 36), (251, 38), (251, 42), (251, 43), (251, 44), (251, 45), (251, 46), (251, 47), (251, 50), (251, 51), (251, 52), (251, 53), (252, 39), (252, 40), (252, 41), (252, 44), (252, 45), (252, 46), (252, 47), (252, 48), (252, 49), (252, 54), (252, 55), (252, 56), (252, 57), (252, 59), (252, 68), (252, 71), (253, 42), (253, 43), (253, 47), (253, 48), (253, 49), (253, 50), (253, 51), (253, 52), (253, 53), (253, 54), (253, 59), (253, 66), (253, 71), (254, 45), (254, 46), (254, 50), (254, 51), (254, 52), (254, 53), (254, 54), (254, 55), (254, 56), (254, 57), (254, 58), (254, 59), (254, 60), (254, 64), (254, 68), (254, 70), (255, 47), (255, 48), (255, 49), (255, 53), (255, 54), (255, 55), (255, 56), (255, 57), (255, 58),
(255, 59), (255, 61), (255, 62), (255, 63), (255, 66), (255, 67), (255, 68), (255, 70), (256, 50), (256, 55), (256, 56), (256, 57), (256, 58), (256, 59), (256, 60), (256, 64), (256, 65), (256, 66), (256, 67), (256, 69), (257, 53), (257, 54), (257, 57), (257, 58), (257, 59), (257, 60), (257, 61), (257, 62), (257, 63), (257, 64), (257, 65), (257, 66), (257, 67), (257, 68), (257, 69), (258, 55), (258, 56), (258, 60), (258, 61), (258, 62), (258, 63), (258, 64), (258, 65), (258, 66), (258, 67), (258, 68), (258, 69), (258, 70), (258, 71), (258, 72), (258, 73), (258, 74), (258, 75), (258, 76), (258, 77), (258, 78), (258, 80), (258, 86), (258, 87), (259, 58), (259, 62), (259, 63), (259, 64), (259, 65), (259, 66), (259, 67), (259, 68), (259, 69), (259, 70), (259, 80), (260, 60), (260, 64),
(260, 65), (260, 66), (260, 67), (260, 68), (260, 69), (260, 70), (260, 71), (260, 72), (260, 73), (260, 74), (260, 75), (260, 76), (260, 77), (260, 78), (260, 80), (261, 62), (261, 66), (261, 67), (261, 68), (261, 69), (261, 70), (261, 71), (261, 72), (261, 73), (261, 74), (261, 75), (261, 76), (261, 77), (261, 78), (261, 80), (262, 64), (262, 67), (262, 68), (262, 69), (262, 70), (262, 71), (262, 72), (262, 73), (262, 74), (262, 75), (262, 76), (262, 77), (262, 78), (262, 80), (263, 66), (263, 69), (263, 70), (263, 71), (263, 72), (263, 73), (263, 74), (263, 75), (263, 76), (263, 77), (263, 78), (263, 80), (264, 67), (264, 70), (264, 71), (264, 72), (264, 73), (264, 74), (264, 75), (264, 76), (264, 77), (264, 78), (264, 79), (264, 80), (264, 81), (264, 84), (264, 85), (264, 86),
(264, 87), (264, 89), (265, 69), (265, 71), (265, 72), (265, 73), (265, 74), (265, 75), (265, 76), (265, 77), (265, 78), (265, 79), (265, 80), (265, 81), (265, 82), (265, 83), (265, 91), (266, 70), (266, 72), (266, 73), (266, 74), (266, 75), (266, 76), (266, 77), (266, 78), (266, 79), (266, 80), (266, 81), (266, 84), (266, 85), (266, 86), (266, 87), (266, 88), (266, 89), (266, 93), (267, 71), (267, 73), (267, 74), (267, 75), (267, 76), (267, 77), (267, 78), (267, 79), (267, 80), (267, 81), (267, 82), (267, 83), (267, 84), (267, 85), (267, 86), (267, 87), (267, 88), (267, 89), (267, 90), (267, 91), (267, 93), (268, 72), (268, 74), (268, 75), (268, 76), (268, 77), (268, 78), (268, 79), (268, 80), (268, 81), (268, 82), (268, 83), (268, 84), (268, 85), (268, 86), (268, 87), (268, 88),
(268, 89), (268, 90), (268, 91), (268, 93), (269, 72), (269, 73), (269, 75), (269, 76), (269, 77), (269, 78), (269, 79), (269, 80), (269, 81), (269, 82), (269, 83), (269, 84), (269, 85), (269, 86), (269, 87), (269, 88), (269, 89), (269, 90), (269, 91), (269, 92), (269, 94), (270, 73), (270, 75), (270, 76), (270, 77), (270, 78), (270, 79), (270, 80), (270, 81), (270, 82), (270, 83), (270, 84), (270, 85), (270, 86), (270, 87), (270, 88), (270, 89), (270, 90), (270, 91), (270, 92), (270, 93), (270, 95), (271, 73), (271, 75), (271, 76), (271, 77), (271, 78), (271, 79), (271, 80), (271, 81), (271, 82), (271, 83), (271, 84), (271, 85), (271, 86), (271, 87), (271, 88), (271, 89), (271, 90), (271, 91), (271, 92), (271, 93), (271, 94), (271, 97), (272, 73), (272, 75), (272, 76), (272, 77),
(272, 78), (272, 79), (272, 80), (272, 81), (272, 82), (272, 83), (272, 84), (272, 85), (272, 86), (272, 87), (272, 88), (272, 89), (272, 90), (272, 91), (272, 92), (272, 93), (272, 94), (272, 95), (272, 98), (272, 99), (272, 100), (272, 101), (272, 102), (273, 73), (273, 75), (273, 76), (273, 77), (273, 78), (273, 79), (273, 80), (273, 81), (273, 82), (273, 83), (273, 84), (273, 85), (273, 86), (273, 87), (273, 88), (273, 89), (273, 90), (273, 91), (273, 92), (273, 93), (273, 94), (273, 95), (273, 96), (273, 97), (273, 103), (273, 105), (274, 73), (274, 75), (274, 76), (274, 77), (274, 78), (274, 79), (274, 80), (274, 81), (274, 82), (274, 83), (274, 84), (274, 85), (274, 86), (274, 87), (274, 88), (274, 89), (274, 90), (274, 91), (274, 92), (274, 93), (274, 94), (274, 95), (274, 96),
(274, 97), (274, 98), (274, 99), (274, 100), (274, 101), (274, 102), (274, 105), (275, 73), (275, 75), (275, 76), (275, 77), (275, 78), (275, 79), (275, 80), (275, 81), (275, 82), (275, 83), (275, 84), (275, 85), (275, 86), (275, 87), (275, 88), (275, 89), (275, 90), (275, 91), (275, 92), (275, 93), (275, 94), (275, 95), (275, 96), (275, 97), (275, 98), (275, 99), (275, 100), (275, 101), (275, 102), (275, 103), (275, 105), (276, 73), (276, 75), (276, 76), (276, 77), (276, 78), (276, 79), (276, 80), (276, 81), (276, 82), (276, 83), (276, 84), (276, 85), (276, 86), (276, 87), (276, 88), (276, 89), (276, 90), (276, 91), (276, 92), (276, 93), (276, 94), (276, 95), (276, 96), (276, 97), (276, 98), (276, 99), (276, 100), (276, 101), (276, 102), (276, 104), (277, 73), (277, 75), (277, 76), (277, 77),
(277, 78), (277, 79), (277, 80), (277, 81), (277, 82), (277, 83), (277, 84), (277, 85), (277, 86), (277, 87), (277, 88), (277, 89), (277, 90), (277, 91), (277, 92), (277, 93), (277, 94), (277, 95), (277, 96), (277, 97), (277, 98), (277, 99), (277, 100), (277, 101), (277, 103), (278, 73), (278, 75), (278, 76), (278, 77), (278, 78), (278, 79), (278, 80), (278, 81), (278, 82), (278, 83), (278, 84), (278, 85), (278, 86), (278, 87), (278, 88), (278, 89), (278, 90), (278, 91), (278, 92), (278, 93), (278, 94), (278, 95), (278, 96), (278, 97), (278, 98), (278, 99), (278, 100), (278, 101), (278, 103), (279, 73), (279, 75), (279, 76), (279, 77), (279, 78), (279, 79), (279, 80), (279, 81), (279, 82), (279, 83), (279, 84), (279, 85), (279, 86), (279, 87), (279, 88), (279, 89), (279, 90), (279, 91),
(279, 92), (279, 93), (279, 94), (279, 95), (279, 96), (279, 97), (279, 98), (279, 99), (279, 100), (279, 102), (280, 73), (280, 75), (280, 76), (280, 77), (280, 78), (280, 79), (280, 80), (280, 81), (280, 82), (280, 83), (280, 84), (280, 85), (280, 86), (280, 87), (280, 88), (280, 89), (280, 90), (280, 91), (280, 92), (280, 93), (280, 94), (280, 95), (280, 96), (280, 97), (280, 98), (280, 99), (280, 100), (280, 101), (280, 103), (281, 73), (281, 75), (281, 76), (281, 77), (281, 78), (281, 79), (281, 80), (281, 81), (281, 82), (281, 83), (281, 84), (281, 85), (281, 86), (281, 87), (281, 88), (281, 89), (281, 90), (281, 91), (281, 92), (281, 93), (281, 94), (281, 95), (281, 96), (281, 97), (281, 98), (281, 99), (281, 100), (281, 101), (281, 102), (281, 105), (282, 73), (282, 75), (282, 76),
(282, 77), (282, 78), (282, 79), (282, 80), (282, 81), (282, 82), (282, 83), (282, 84), (282, 85), (282, 86), (282, 87), (282, 88), (282, 89), (282, 90), (282, 91), (282, 92), (282, 93), (282, 94), (282, 95), (282, 96), (282, 97), (282, 98), (282, 99), (282, 100), (282, 101), (282, 102), (282, 103), (282, 106), (283, 73), (283, 75), (283, 76), (283, 77), (283, 78), (283, 79), (283, 80), (283, 81), (283, 82), (283, 83), (283, 84), (283, 85), (283, 86), (283, 87), (283, 88), (283, 89), (283, 90), (283, 91), (283, 92), (283, 93), (283, 94), (283, 95), (283, 96), (283, 97), (283, 98), (283, 99), (283, 100), (283, 101), (283, 102), (283, 103), (283, 104), (283, 105), (283, 108), (284, 73), (284, 75), (284, 76), (284, 77), (284, 78), (284, 79), (284, 80), (284, 81), (284, 82), (284, 83), (284, 84),
(284, 85), (284, 86), (284, 87), (284, 88), (284, 89), (284, 90), (284, 91), (284, 92), (284, 93), (284, 94), (284, 95), (284, 96), (284, 97), (284, 98), (284, 99), (284, 100), (284, 101), (284, 102), (284, 103), (284, 104), (284, 105), (284, 106), (284, 109), (284, 118), (285, 73), (285, 75), (285, 76), (285, 77), (285, 78), (285, 79), (285, 80), (285, 81), (285, 82), (285, 83), (285, 84), (285, 85), (285, 86), (285, 87), (285, 88), (285, 89), (285, 90), (285, 91), (285, 92), (285, 93), (285, 94), (285, 95), (285, 96), (285, 97), (285, 98), (285, 99), (285, 100), (285, 101), (285, 102), (285, 103), (285, 104), (285, 105), (285, 106), (285, 107), (285, 108), (285, 111), (285, 117), (285, 119), (285, 120), (285, 121), (285, 122), (285, 123), (285, 124), (285, 125), (285, 127), (285, 128), (285, 129), (285, 130),
(285, 131), (285, 133), (286, 72), (286, 74), (286, 75), (286, 76), (286, 77), (286, 78), (286, 79), (286, 80), (286, 81), (286, 82), (286, 83), (286, 84), (286, 85), (286, 86), (286, 87), (286, 88), (286, 89), (286, 90), (286, 91), (286, 92), (286, 93), (286, 94), (286, 95), (286, 96), (286, 97), (286, 98), (286, 99), (286, 100), (286, 101), (286, 102), (286, 103), (286, 104), (286, 105), (286, 106), (286, 107), (286, 108), (286, 109), (286, 112), (286, 116), (286, 126), (287, 72), (287, 74), (287, 75), (287, 76), (287, 77), (287, 78), (287, 79), (287, 80), (287, 81), (287, 82), (287, 83), (287, 84), (287, 85), (287, 86), (287, 87), (287, 88), (287, 89), (287, 90), (287, 91), (287, 92), (287, 93), (287, 94), (287, 95), (287, 96), (287, 97), (287, 98), (287, 99), (287, 100), (287, 101), (287, 102),
(287, 103), (287, 104), (287, 105), (287, 106), (287, 107), (287, 108), (287, 109), (287, 110), (287, 111), (287, 113), (287, 114), (287, 117), (287, 118), (287, 119), (287, 120), (287, 121), (287, 122), (287, 123), (287, 124), (287, 125), (288, 72), (288, 74), (288, 75), (288, 76), (288, 77), (288, 78), (288, 79), (288, 80), (288, 81), (288, 82), (288, 83), (288, 84), (288, 85), (288, 86), (288, 87), (288, 88), (288, 89), (288, 90), (288, 91), (288, 92), (288, 93), (288, 94), (288, 95), (288, 96), (288, 97), (288, 98), (288, 99), (288, 100), (288, 101), (288, 102), (288, 103), (288, 104), (288, 105), (288, 106), (288, 107), (288, 108), (288, 109), (288, 110), (288, 111), (288, 112), (288, 116), (288, 117), (288, 118), (288, 119), (288, 120), (288, 121), (288, 122), (288, 124), (289, 71), (289, 73), (289, 74), (289, 75),
(289, 76), (289, 77), (289, 78), (289, 79), (289, 80), (289, 81), (289, 82), (289, 83), (289, 84), (289, 85), (289, 86), (289, 87), (289, 88), (289, 89), (289, 90), (289, 91), (289, 92), (289, 93), (289, 94), (289, 95), (289, 96), (289, 97), (289, 98), (289, 99), (289, 100), (289, 101), (289, 102), (289, 103), (289, 104), (289, 105), (289, 106), (289, 107), (289, 108), (289, 109), (289, 110), (289, 111), (289, 112), (289, 113), (289, 114), (289, 115), (289, 116), (289, 117), (289, 118), (289, 119), (289, 120), (289, 121), (289, 122), (289, 124), (290, 71), (290, 73), (290, 74), (290, 75), (290, 76), (290, 77), (290, 78), (290, 79), (290, 80), (290, 81), (290, 82), (290, 83), (290, 84), (290, 85), (290, 86), (290, 87), (290, 88), (290, 89), (290, 90), (290, 91), (290, 92), (290, 93), (290, 94), (290, 95),
(290, 96), (290, 97), (290, 98), (290, 99), (290, 100), (290, 101), (290, 102), (290, 103), (290, 104), (290, 105), (290, 106), (290, 107), (290, 108), (290, 109), (290, 110), (290, 111), (290, 112), (290, 113), (290, 114), (290, 115), (290, 116), (290, 117), (290, 118), (290, 119), (290, 120), (290, 121), (290, 122), (290, 124), (291, 70), (291, 72), (291, 73), (291, 74), (291, 75), (291, 76), (291, 77), (291, 78), (291, 79), (291, 80), (291, 81), (291, 82), (291, 83), (291, 84), (291, 85), (291, 86), (291, 87), (291, 88), (291, 89), (291, 90), (291, 91), (291, 92), (291, 93), (291, 94), (291, 95), (291, 96), (291, 97), (291, 98), (291, 99), (291, 100), (291, 101), (291, 102), (291, 103), (291, 104), (291, 105), (291, 106), (291, 107), (291, 108), (291, 109), (291, 110), (291, 111), (291, 112), (291, 113), (291, 114),
(291, 115), (291, 116), (291, 117), (291, 118), (291, 119), (291, 120), (291, 121), (291, 122), (291, 124), (292, 69), (292, 71), (292, 72), (292, 73), (292, 74), (292, 75), (292, 76), (292, 77), (292, 78), (292, 79), (292, 80), (292, 81), (292, 82), (292, 83), (292, 84), (292, 85), (292, 86), (292, 87), (292, 88), (292, 89), (292, 90), (292, 91), (292, 92), (292, 93), (292, 94), (292, 95), (292, 96), (292, 97), (292, 98), (292, 99), (292, 100), (292, 101), (292, 102), (292, 103), (292, 104), (292, 105), (292, 106), (292, 107), (292, 108), (292, 109), (292, 110), (292, 111), (292, 112), (292, 113), (292, 114), (292, 115), (292, 116), (292, 117), (292, 118), (292, 119), (292, 120), (292, 121), (292, 122), (292, 123), (292, 124), (292, 126), (293, 67), (293, 70), (293, 71), (293, 72), (293, 73), (293, 74), (293, 75),
(293, 76), (293, 77), (293, 78), (293, 79), (293, 89), (293, 90), (293, 91), (293, 92), (293, 93), (293, 94), (293, 95), (293, 96), (293, 97), (293, 98), (293, 99), (293, 100), (293, 101), (293, 102), (293, 103), (293, 104), (293, 105), (293, 106), (293, 107), (293, 108), (293, 109), (293, 110), (293, 111), (293, 112), (293, 113), (293, 114), (293, 115), (293, 116), (293, 117), (293, 118), (293, 119), (293, 120), (293, 121), (293, 122), (293, 123), (293, 124), (293, 127), (293, 128), (293, 129), (293, 130), (293, 146), (294, 65), (294, 66), (294, 69), (294, 70), (294, 71), (294, 72), (294, 73), (294, 74), (294, 75), (294, 76), (294, 77), (294, 78), (294, 80), (294, 81), (294, 82), (294, 83), (294, 84), (294, 85), (294, 86), (294, 87), (294, 88), (294, 91), (294, 92), (294, 93), (294, 94), (294, 95), (294, 96),
(294, 97), (294, 98), (294, 99), (294, 100), (294, 101), (294, 102), (294, 103), (294, 104), (294, 105), (294, 106), (294, 107), (294, 108), (294, 109), (294, 110), (294, 111), (294, 112), (294, 113), (294, 114), (294, 115), (294, 116), (294, 117), (294, 118), (294, 119), (294, 120), (294, 121), (294, 122), (294, 123), (294, 124), (294, 125), (294, 126), (294, 131), (294, 132), (294, 133), (294, 134), (294, 135), (294, 136), (294, 137), (294, 138), (294, 139), (294, 140), (294, 141), (294, 142), (294, 143), (294, 144), (294, 146), (295, 64), (295, 67), (295, 68), (295, 69), (295, 70), (295, 71), (295, 72), (295, 73), (295, 74), (295, 75), (295, 76), (295, 77), (295, 79), (295, 89), (295, 92), (295, 93), (295, 94), (295, 95), (295, 96), (295, 97), (295, 98), (295, 99), (295, 100), (295, 101), (295, 102), (295, 103), (295, 104),
(295, 105), (295, 106), (295, 107), (295, 108), (295, 109), (295, 110), (295, 111), (295, 112), (295, 113), (295, 114), (295, 115), (295, 116), (295, 117), (295, 118), (295, 119), (295, 120), (295, 121), (295, 122), (295, 123), (295, 124), (295, 125), (295, 126), (295, 127), (295, 128), (295, 129), (295, 130), (295, 146), (296, 64), (296, 66), (296, 67), (296, 68), (296, 69), (296, 70), (296, 71), (296, 72), (296, 73), (296, 74), (296, 75), (296, 78), (296, 91), (296, 94), (296, 95), (296, 96), (296, 97), (296, 98), (296, 99), (296, 100), (296, 101), (296, 102), (296, 103), (296, 104), (296, 105), (296, 106), (296, 107), (296, 108), (296, 109), (296, 110), (296, 111), (296, 112), (296, 113), (296, 114), (296, 115), (296, 116), (296, 117), (296, 118), (296, 119), (296, 120), (296, 121), (296, 122), (296, 123), (296, 124), (296, 125),
(296, 126), (296, 127), (296, 128), (296, 129), (296, 130), (296, 131), (296, 132), (296, 133), (296, 134), (296, 135), (296, 136), (296, 137), (296, 138), (296, 139), (296, 140), (296, 141), (296, 142), (296, 143), (296, 144), (296, 146), (297, 64), (297, 66), (297, 67), (297, 68), (297, 69), (297, 70), (297, 71), (297, 72), (297, 73), (297, 74), (297, 77), (297, 92), (297, 96), (297, 97), (297, 98), (297, 99), (297, 100), (297, 101), (297, 102), (297, 103), (297, 104), (297, 105), (297, 106), (297, 107), (297, 108), (297, 109), (297, 110), (297, 111), (297, 112), (297, 113), (297, 114), (297, 115), (297, 116), (297, 117), (297, 118), (297, 119), (297, 120), (297, 121), (297, 122), (297, 123), (297, 124), (297, 125), (297, 126), (297, 127), (297, 128), (297, 129), (297, 130), (297, 131), (297, 132), (297, 133), (297, 134), (297, 135),
(297, 136), (297, 137), (297, 138), (297, 139), (297, 140), (297, 141), (297, 142), (297, 143), (297, 144), (297, 145), (297, 147), (298, 63), (298, 65), (298, 66), (298, 67), (298, 68), (298, 69), (298, 70), (298, 71), (298, 72), (298, 73), (298, 74), (298, 76), (298, 94), (298, 95), (298, 98), (298, 99), (298, 100), (298, 101), (298, 102), (298, 103), (298, 104), (298, 105), (298, 106), (298, 107), (298, 108), (298, 109), (298, 110), (298, 111), (298, 112), (298, 113), (298, 114), (298, 115), (298, 116), (298, 117), (298, 118), (298, 119), (298, 120), (298, 121), (298, 122), (298, 123), (298, 124), (298, 125), (298, 126), (298, 127), (298, 128), (298, 129), (298, 130), (298, 131), (298, 132), (298, 133), (298, 134), (298, 135), (298, 136), (298, 137), (298, 138), (298, 139), (298, 140), (298, 141), (298, 142), (298, 143), (298, 144),
(298, 145), (298, 147), (299, 63), (299, 65), (299, 66), (299, 67), (299, 68), (299, 69), (299, 70), (299, 71), (299, 72), (299, 73), (299, 75), (299, 96), (299, 99), (299, 100), (299, 101), (299, 102), (299, 103), (299, 104), (299, 105), (299, 106), (299, 107), (299, 108), (299, 109), (299, 110), (299, 111), (299, 112), (299, 113), (299, 114), (299, 115), (299, 116), (299, 117), (299, 118), (299, 119), (299, 120), (299, 121), (299, 122), (299, 123), (299, 124), (299, 125), (299, 126), (299, 127), (299, 128), (299, 129), (299, 130), (299, 131), (299, 132), (299, 133), (299, 134), (299, 135), (299, 136), (299, 137), (299, 138), (299, 139), (299, 140), (299, 141), (299, 142), (299, 143), (299, 144), (299, 145), (299, 147), (300, 62), (300, 64), (300, 65), (300, 66), (300, 67), (300, 68), (300, 69), (300, 70), (300, 71), (300, 72),
(300, 74), (300, 98), (300, 100), (300, 101), (300, 102), (300, 103), (300, 104), (300, 105), (300, 106), (300, 107), (300, 108), (300, 109), (300, 110), (300, 111), (300, 112), (300, 113), (300, 114), (300, 115), (300, 116), (300, 117), (300, 118), (300, 119), (300, 120), (300, 121), (300, 122), (300, 123), (300, 124), (300, 125), (300, 126), (300, 127), (300, 128), (300, 129), (300, 130), (300, 131), (300, 132), (300, 133), (300, 134), (300, 135), (300, 136), (300, 137), (300, 138), (300, 139), (300, 140), (300, 141), (300, 142), (300, 143), (300, 144), (300, 145), (300, 146), (300, 148), (301, 61), (301, 63), (301, 64), (301, 65), (301, 66), (301, 67), (301, 68), (301, 69), (301, 70), (301, 71), (301, 73), (301, 99), (301, 101), (301, 102), (301, 103), (301, 104), (301, 105), (301, 106), (301, 107), (301, 108), (301, 109), (301, 110),
(301, 111), (301, 112), (301, 113), (301, 114), (301, 115), (301, 116), (301, 117), (301, 118), (301, 119), (301, 120), (301, 121), (301, 122), (301, 123), (301, 124), (301, 125), (301, 126), (301, 127), (301, 128), (301, 129), (301, 130), (301, 131), (301, 132), (301, 133), (301, 134), (301, 135), (301, 136), (301, 137), (301, 138), (301, 139), (301, 140), (301, 141), (301, 142), (301, 143), (301, 144), (301, 145), (301, 146), (301, 148), (302, 60), (302, 62), (302, 63), (302, 64), (302, 65), (302, 66), (302, 67), (302, 68), (302, 69), (302, 70), (302, 72), (302, 99), (302, 101), (302, 102), (302, 103), (302, 104), (302, 105), (302, 106), (302, 107), (302, 108), (302, 109), (302, 110), (302, 111), (302, 112), (302, 113), (302, 114), (302, 115), (302, 116), (302, 117), (302, 118), (302, 119), (302, 120), (302, 121), (302, 122), (302, 123),
(302, 124), (302, 125), (302, 126), (302, 127), (302, 128), (302, 129), (302, 130), (302, 131), (302, 132), (302, 133), (302, 134), (302, 135), (302, 136), (302, 137), (302, 138), (302, 139), (302, 140), (302, 141), (302, 142), (302, 143), (302, 144), (302, 145), (302, 146), (302, 147), (302, 148), (302, 149), (303, 59), (303, 61), (303, 62), (303, 63), (303, 64), (303, 65), (303, 66), (303, 67), (303, 68), (303, 69), (303, 71), (303, 100), (303, 102), (303, 103), (303, 104), (303, 105), (303, 106), (303, 107), (303, 108), (303, 109), (303, 110), (303, 111), (303, 112), (303, 113), (303, 114), (303, 115), (303, 116), (303, 117), (303, 118), (303, 119), (303, 120), (303, 121), (303, 122), (303, 123), (303, 124), (303, 125), (303, 126), (303, 127), (303, 128), (303, 129), (303, 130), (303, 131), (303, 132), (303, 133), (303, 134), (303, 135),
(303, 136), (303, 137), (303, 138), (303, 139), (303, 140), (303, 141), (303, 142), (303, 143), (303, 144), (303, 145), (303, 146), (303, 147), (303, 149), (304, 58), (304, 60), (304, 61), (304, 62), (304, 63), (304, 64), (304, 65), (304, 66), (304, 67), (304, 68), (304, 70), (304, 101), (304, 103), (304, 104), (304, 105), (304, 106), (304, 107), (304, 108), (304, 109), (304, 110), (304, 111), (304, 112), (304, 113), (304, 114), (304, 115), (304, 116), (304, 117), (304, 118), (304, 119), (304, 120), (304, 121), (304, 122), (304, 123), (304, 124), (304, 125), (304, 126), (304, 127), (304, 128), (304, 129), (304, 130), (304, 131), (304, 132), (304, 133), (304, 134), (304, 135), (304, 136), (304, 137), (304, 138), (304, 139), (304, 140), (304, 141), (304, 142), (304, 143), (304, 144), (304, 145), (304, 146), (304, 147), (304, 149), (305, 58),
(305, 60), (305, 61), (305, 62), (305, 63), (305, 64), (305, 65), (305, 66), (305, 67), (305, 69), (305, 101), (305, 103), (305, 104), (305, 105), (305, 106), (305, 107), (305, 108), (305, 109), (305, 110), (305, 111), (305, 112), (305, 113), (305, 114), (305, 115), (305, 116), (305, 117), (305, 118), (305, 119), (305, 120), (305, 121), (305, 122), (305, 123), (305, 124), (305, 125), (305, 126), (305, 127), (305, 128), (305, 129), (305, 130), (305, 131), (305, 132), (305, 133), (305, 134), (305, 135), (305, 136), (305, 137), (305, 138), (305, 139), (305, 140), (305, 141), (305, 142), (305, 143), (305, 144), (305, 145), (305, 146), (305, 147), (305, 149), (306, 58), (306, 60), (306, 61), (306, 62), (306, 63), (306, 64), (306, 65), (306, 66), (306, 68), (306, 101), (306, 103), (306, 104), (306, 105), (306, 106), (306, 107), (306, 108),
(306, 109), (306, 110), (306, 111), (306, 112), (306, 113), (306, 114), (306, 115), (306, 116), (306, 117), (306, 118), (306, 119), (306, 120), (306, 121), (306, 122), (306, 123), (306, 124), (306, 125), (306, 126), (306, 127), (306, 128), (306, 129), (306, 130), (306, 131), (306, 132), (306, 133), (306, 134), (306, 135), (306, 136), (306, 137), (306, 138), (306, 139), (306, 140), (306, 141), (306, 142), (306, 143), (306, 144), (306, 145), (306, 146), (306, 147), (306, 149), (307, 58), (307, 60), (307, 61), (307, 62), (307, 63), (307, 64), (307, 67), (307, 102), (307, 104), (307, 105), (307, 106), (307, 107), (307, 108), (307, 109), (307, 110), (307, 111), (307, 112), (307, 113), (307, 114), (307, 115), (307, 116), (307, 117), (307, 118), (307, 119), (307, 120), (307, 121), (307, 122), (307, 130), (307, 131), (307, 132), (307, 133), (307, 134),
(307, 135), (307, 136), (307, 137), (307, 138), (307, 139), (307, 140), (307, 141), (307, 142), (307, 143), (307, 144), (307, 145), (307, 146), (307, 147), (307, 148), (307, 150), (308, 58), (308, 60), (308, 61), (308, 62), (308, 63), (308, 66), (308, 102), (308, 104), (308, 105), (308, 106), (308, 107), (308, 108), (308, 109), (308, 110), (308, 111), (308, 112), (308, 113), (308, 114), (308, 115), (308, 116), (308, 117), (308, 118), (308, 119), (308, 123), (308, 124), (308, 125), (308, 126), (308, 127), (308, 128), (308, 129), (308, 134), (308, 135), (308, 136), (308, 137), (308, 138), (308, 139), (308, 140), (308, 141), (308, 142), (308, 143), (308, 144), (308, 145), (308, 146), (308, 147), (308, 148), (308, 149), (308, 151), (308, 193), (309, 58), (309, 60), (309, 61), (309, 62), (309, 102), (309, 104), (309, 105), (309, 106), (309, 107),
(309, 108), (309, 109), (309, 110), (309, 111), (309, 112), (309, 113), (309, 114), (309, 115), (309, 116), (309, 117), (309, 118), (309, 121), (309, 122), (309, 130), (309, 131), (309, 132), (309, 133), (309, 137), (309, 138), (309, 139), (309, 140), (309, 141), (309, 142), (309, 143), (309, 144), (309, 145), (309, 146), (309, 147), (309, 148), (309, 149), (309, 151), (309, 192), (310, 58), (310, 63), (310, 102), (310, 104), (310, 105), (310, 106), (310, 107), (310, 108), (310, 109), (310, 110), (310, 111), (310, 112), (310, 113), (310, 114), (310, 115), (310, 116), (310, 117), (310, 119), (310, 134), (310, 136), (310, 140), (310, 141), (310, 142), (310, 143), (310, 144), (310, 145), (310, 146), (310, 147), (310, 148), (310, 149), (310, 150), (310, 152), (310, 192), (311, 58), (311, 60), (311, 62), (311, 103), (311, 105), (311, 106), (311, 107),
(311, 108), (311, 109), (311, 110), (311, 111), (311, 112), (311, 113), (311, 114), (311, 115), (311, 116), (311, 118), (311, 137), (311, 138), (311, 139), (311, 145), (311, 146), (311, 147), (311, 148), (311, 149), (311, 150), (311, 151), (311, 153), (311, 191), (311, 192), (312, 103), (312, 105), (312, 106), (312, 107), (312, 108), (312, 109), (312, 110), (312, 111), (312, 112), (312, 113), (312, 114), (312, 115), (312, 117), (312, 141), (312, 142), (312, 143), (312, 146), (312, 147), (312, 148), (312, 149), (312, 150), (312, 151), (312, 152), (312, 154), (312, 191), (312, 192), (313, 103), (313, 105), (313, 106), (313, 107), (313, 108), (313, 109), (313, 110), (313, 111), (313, 112), (313, 113), (313, 114), (313, 115), (313, 117), (313, 145), (313, 147), (313, 148), (313, 149), (313, 150), (313, 151), (313, 152), (313, 153), (313, 155), (313, 192),
(314, 103), (314, 105), (314, 106), (314, 107), (314, 108), (314, 109), (314, 110), (314, 111), (314, 112), (314, 113), (314, 114), (314, 116), (314, 146), (314, 149), (314, 150), (314, 151), (314, 152), (314, 153), (314, 154), (314, 191), (315, 104), (315, 106), (315, 107), (315, 108), (315, 109), (315, 110), (315, 111), (315, 112), (315, 113), (315, 114), (315, 116), (315, 147), (315, 150), (315, 151), (315, 152), (315, 153), (315, 154), (315, 155), (315, 158), (315, 188), (315, 191), (316, 104), (316, 106), (316, 107), (316, 108), (316, 109), (316, 110), (316, 111), (316, 112), (316, 113), (316, 115), (316, 149), (316, 151), (316, 152), (316, 153), (316, 154), (316, 155), (316, 156), (316, 159), (316, 187), (316, 190), (317, 104), (317, 106), (317, 107), (317, 108), (317, 109), (317, 110), (317, 111), (317, 112), (317, 113), (317, 115), (317, 150),
(317, 152), (317, 153), (317, 154), (317, 155), (317, 156), (317, 157), (317, 158), (317, 160), (317, 185), (317, 189), (318, 104), (318, 106), (318, 107), (318, 108), (318, 109), (318, 110), (318, 111), (318, 112), (318, 114), (318, 151), (318, 154), (318, 155), (318, 156), (318, 157), (318, 158), (318, 162), (318, 184), (318, 188), (319, 105), (319, 107), (319, 108), (319, 109), (319, 110), (319, 111), (319, 112), (319, 114), (319, 152), (319, 155), (319, 159), (319, 163), (319, 183), (319, 185), (319, 187), (320, 105), (320, 107), (320, 108), (320, 109), (320, 110), (320, 111), (320, 113), (320, 153), (320, 157), (320, 164), (320, 165), (320, 181), (320, 186), (321, 105), (321, 107), (321, 108), (321, 109), (321, 110), (321, 111), (321, 112), (321, 155), (321, 165), (321, 167), (321, 180), (321, 185), (322, 106), (322, 108), (322, 109), (322, 110),
(322, 112), (322, 166), (322, 169), (322, 178), (322, 183), (323, 107), (323, 111), (323, 167), (323, 171), (323, 172), (323, 177), (323, 178), (323, 179), (323, 180), (324, 108), (324, 110), (324, 169), (324, 171), (324, 172), (324, 173), (324, 174), (324, 175), (324, 176), (324, 177), )
coordinates_F3F3F3 = ((169, 106),
(170, 105), (170, 107), (171, 104), (171, 106), (171, 108), (171, 109), (171, 110), (171, 111), (171, 112), (171, 113), (171, 114), (171, 115), (172, 105), (172, 106), (172, 107), (172, 115), (173, 104), (173, 105), (173, 106), (173, 107), (173, 108), (173, 109), (173, 110), (173, 111), (173, 112), (173, 114), (174, 100), (174, 103), (174, 104), (174, 105), (174, 106), (174, 107), (174, 108), (174, 109), (174, 110), (174, 111), (174, 113), (175, 99), (175, 102), (175, 103), (175, 104), (175, 105), (175, 106), (175, 107), (175, 108), (175, 109), (175, 110), (175, 111), (175, 113), (176, 98), (176, 101), (176, 102), (176, 103), (176, 104), (176, 105), (176, 106), (176, 107), (176, 108), (176, 109), (176, 110), (176, 112), (177, 98), (177, 100), (177, 101), (177, 102), (177, 103), (177, 104), (177, 105), (177, 106), (177, 107), (177, 108), (177, 109),
(177, 111), (178, 98), (178, 100), (178, 101), (178, 102), (178, 103), (178, 104), (178, 105), (178, 106), (178, 107), (178, 108), (178, 110), (179, 98), (179, 100), (179, 101), (179, 102), (179, 103), (179, 104), (179, 105), (179, 106), (179, 109), (180, 98), (180, 100), (180, 101), (180, 102), (180, 103), (180, 104), (180, 105), (180, 108), (181, 98), (181, 100), (181, 101), (181, 102), (181, 103), (181, 104), (181, 107), (182, 99), (182, 102), (182, 105), (183, 100), (183, 104), (184, 102), (189, 157), (190, 157), (190, 159), (191, 157), (191, 161), (192, 157), (192, 159), (192, 162), (192, 163), (193, 157), (193, 159), (193, 160), (193, 161), (193, 164), (194, 157), (194, 159), (194, 160), (194, 161), (194, 162), (194, 165), (195, 157), (195, 159), (195, 160), (195, 161), (195, 162), (195, 163), (195, 164), (195, 167), (196, 157), (196, 159),
(196, 160), (196, 161), (196, 162), (196, 163), (196, 164), (196, 165), (196, 168), (197, 157), (197, 159), (197, 160), (197, 161), (197, 162), (197, 163), (197, 164), (197, 165), (197, 166), (197, 169), (198, 157), (198, 159), (198, 160), (198, 161), (198, 162), (198, 163), (198, 164), (198, 165), (198, 166), (198, 167), (198, 168), (198, 170), (199, 157), (199, 159), (199, 160), (199, 161), (199, 162), (199, 163), (199, 164), (199, 168), (200, 157), (200, 165), (200, 166), (200, 167), (201, 157), (201, 159), (201, 160), (201, 161), (201, 162), (201, 163), (201, 164), (201, 169), (202, 157), (202, 177), (203, 178), (203, 180), (204, 178), (204, 179), (244, 170), (246, 157), (246, 159), (246, 160), (246, 161), (246, 162), (246, 163), (246, 164), (246, 165), (246, 167), (247, 157), (247, 167), (248, 157), (248, 159), (248, 160), (248, 161), (248, 162),
(248, 163), (248, 164), (248, 165), (248, 167), (249, 157), (249, 159), (249, 160), (249, 161), (249, 162), (249, 163), (249, 164), (249, 166), (250, 157), (250, 159), (250, 160), (250, 161), (250, 162), (250, 163), (250, 164), (250, 166), (251, 157), (251, 159), (251, 160), (251, 161), (251, 162), (251, 163), (251, 165), (252, 157), (252, 159), (252, 160), (252, 161), (252, 162), (252, 163), (252, 165), (253, 157), (253, 159), (253, 160), (253, 161), (253, 162), (253, 164), (254, 157), (254, 159), (254, 160), (254, 161), (254, 163), (255, 157), (255, 159), (255, 160), (256, 157), (256, 159), (257, 157), (257, 160), (258, 157), (258, 159), (264, 95), (264, 97), (264, 98), (264, 99), (264, 100), (264, 101), (264, 102), (264, 103), (264, 104), (264, 105), (264, 106), (264, 108), (265, 95), (265, 109), (266, 95), (266, 97), (266, 98), (266, 99),
(266, 100), (266, 101), (266, 102), (266, 103), (266, 104), (266, 105), (266, 106), (266, 107), (266, 108), (266, 110), (267, 95), (267, 97), (267, 98), (267, 99), (267, 100), (267, 101), (267, 102), (267, 103), (267, 104), (267, 105), (267, 106), (267, 107), (267, 108), (267, 109), (267, 111), (268, 96), (268, 99), (268, 100), (268, 101), (268, 102), (268, 103), (268, 104), (268, 105), (268, 106), (268, 107), (268, 108), (268, 109), (268, 111), (269, 97), (269, 104), (269, 105), (269, 106), (269, 107), (269, 108), (269, 109), (269, 110), (269, 112), (270, 99), (270, 100), (270, 101), (270, 102), (270, 103), (270, 106), (270, 107), (270, 108), (270, 109), (270, 110), (270, 112), (271, 104), (271, 105), (271, 107), (271, 108), (271, 109), (271, 110), (271, 111), (271, 113), (272, 106), (272, 108), (272, 109), (272, 110), (272, 111), (272, 113),
(273, 107), (273, 110), (273, 111), (273, 112), (273, 114), (274, 107), (274, 108), (274, 109), (274, 112), (274, 114), (275, 110), (275, 111), (275, 114), (276, 114), )
coordinates_CCFF33 = ((186, 96),
(187, 95), (187, 96), (188, 95), (188, 96), (189, 95), (189, 96), (190, 95), (190, 96), (191, 96), (192, 95), (192, 96), (193, 95), (193, 96), (194, 95), (194, 96), (195, 57), (195, 59), (195, 60), (195, 61), (195, 62), (195, 63), (195, 65), (195, 96), (196, 65), (196, 96), (197, 56), (197, 58), (197, 59), (197, 60), (197, 61), (197, 62), (197, 63), (197, 64), (197, 66), (198, 56), (198, 57), (198, 58), (198, 59), (198, 60), (198, 61), (198, 62), (198, 63), (198, 64), (198, 66), (199, 55), (199, 57), (199, 58), (199, 59), (199, 60), (199, 61), (199, 62), (199, 63), (199, 64), (199, 65), (199, 67), (200, 55), (200, 57), (200, 58), (200, 59), (200, 60), (200, 61), (200, 62), (200, 63), (200, 64), (200, 65), (200, 67), (201, 54), (201, 55), (201, 56), (201, 57), (201, 58), (201, 59),
(201, 60), (201, 61), (201, 62), (201, 63), (201, 64), (201, 65), (201, 66), (201, 68), (202, 54), (202, 56), (202, 57), (202, 58), (202, 59), (202, 60), (202, 61), (202, 62), (202, 63), (202, 64), (202, 65), (202, 66), (202, 68), (203, 54), (203, 56), (203, 57), (203, 58), (203, 59), (203, 60), (203, 61), (203, 62), (203, 63), (203, 64), (203, 65), (203, 68), (204, 54), (204, 56), (204, 57), (204, 58), (204, 59), (204, 60), (204, 61), (204, 62), (204, 63), (204, 64), (204, 66), (205, 54), (205, 56), (205, 57), (205, 58), (205, 59), (205, 60), (205, 61), (205, 62), (205, 63), (205, 65), (206, 54), (206, 56), (206, 57), (206, 58), (206, 59), (206, 60), (206, 61), (206, 62), (206, 63), (206, 65), (207, 54), (207, 56), (207, 57), (207, 58), (207, 59), (207, 60), (207, 61), (207, 62),
(207, 63), (207, 65), (208, 54), (208, 56), (208, 57), (208, 58), (208, 59), (208, 60), (208, 61), (208, 62), (208, 63), (208, 64), (208, 66), (209, 53), (209, 55), (209, 56), (209, 57), (209, 58), (209, 59), (209, 60), (209, 61), (209, 62), (209, 63), (209, 64), (209, 65), (209, 67), (210, 53), (210, 55), (210, 56), (210, 57), (210, 58), (210, 59), (210, 60), (210, 61), (210, 62), (210, 63), (210, 64), (210, 65), (210, 66), (210, 68), (210, 69), (210, 70), (210, 71), (210, 72), (210, 73), (210, 74), (210, 75), (210, 76), (210, 77), (210, 78), (210, 79), (210, 80), (210, 82), (211, 53), (211, 55), (211, 56), (211, 57), (211, 58), (211, 59), (211, 60), (211, 61), (211, 62), (211, 63), (211, 64), (211, 65), (211, 66), (211, 67), (211, 83), (212, 53), (212, 55), (212, 56), (212, 57),
(212, 58), (212, 59), (212, 60), (212, 61), (212, 62), (212, 63), (212, 64), (212, 65), (212, 66), (212, 67), (212, 68), (212, 69), (212, 70), (212, 71), (212, 72), (212, 73), (212, 74), (212, 75), (212, 76), (212, 77), (212, 78), (212, 79), (212, 80), (212, 81), (212, 82), (212, 84), (213, 52), (213, 54), (213, 55), (213, 56), (213, 57), (213, 58), (213, 59), (213, 60), (213, 61), (213, 62), (213, 63), (213, 64), (213, 65), (213, 66), (213, 67), (213, 68), (213, 69), (213, 70), (213, 71), (213, 72), (213, 73), (213, 74), (213, 75), (213, 76), (213, 77), (213, 78), (213, 79), (213, 80), (213, 81), (213, 82), (213, 84), (214, 52), (214, 54), (214, 55), (214, 56), (214, 57), (214, 58), (214, 59), (214, 60), (214, 61), (214, 62), (214, 63), (214, 64), (214, 65), (214, 66), (214, 67),
(214, 68), (214, 69), (214, 70), (214, 71), (214, 72), (214, 73), (214, 74), (214, 75), (214, 76), (214, 77), (214, 78), (214, 79), (214, 80), (214, 81), (214, 82), (214, 84), (215, 52), (215, 54), (215, 55), (215, 56), (215, 57), (215, 58), (215, 59), (215, 60), (215, 61), (215, 62), (215, 63), (215, 64), (215, 65), (215, 66), (215, 67), (215, 68), (215, 69), (215, 70), (215, 71), (215, 72), (215, 73), (215, 74), (215, 75), (215, 76), (215, 77), (215, 78), (215, 79), (215, 80), (215, 81), (215, 82), (215, 84), (216, 52), (216, 54), (216, 55), (216, 56), (216, 57), (216, 58), (216, 59), (216, 60), (216, 61), (216, 62), (216, 63), (216, 64), (216, 65), (216, 66), (216, 67), (216, 68), (216, 69), (216, 70), (216, 71), (216, 72), (216, 73), (216, 74), (216, 75), (216, 76), (216, 77),
(216, 78), (216, 79), (216, 80), (216, 81), (216, 82), (216, 84), (217, 52), (217, 54), (217, 55), (217, 56), (217, 57), (217, 58), (217, 59), (217, 60), (217, 61), (217, 62), (217, 63), (217, 64), (217, 65), (217, 66), (217, 67), (217, 68), (217, 69), (217, 70), (217, 71), (217, 72), (217, 73), (217, 74), (217, 75), (217, 76), (217, 77), (217, 78), (217, 79), (217, 80), (217, 81), (217, 82), (217, 84), (218, 52), (218, 54), (218, 55), (218, 56), (218, 57), (218, 58), (218, 59), (218, 60), (218, 61), (218, 62), (218, 63), (218, 64), (218, 65), (218, 66), (218, 67), (218, 68), (218, 69), (218, 70), (218, 71), (218, 72), (218, 73), (218, 74), (218, 75), (218, 76), (218, 77), (218, 78), (218, 79), (218, 80), (218, 81), (218, 83), (219, 51), (219, 83), (220, 51), (220, 53), (220, 54),
(220, 55), (220, 56), (220, 57), (220, 58), (220, 59), (220, 60), (220, 61), (220, 62), (220, 63), (220, 64), (220, 65), (220, 66), (220, 67), (220, 68), (220, 69), (220, 70), (220, 71), (220, 72), (220, 73), (220, 74), (220, 75), (220, 76), (220, 77), (220, 78), (220, 79), (220, 80), (220, 82), (225, 51), (225, 53), (225, 54), (225, 55), (225, 56), (225, 57), (225, 58), (225, 59), (225, 60), (225, 61), (225, 62), (225, 63), (225, 64), (225, 65), (225, 66), (225, 67), (225, 68), (225, 69), (225, 70), (225, 71), (225, 72), (225, 73), (225, 74), (225, 75), (225, 76), (225, 77), (225, 78), (225, 79), (225, 80), (225, 82), (226, 51), (226, 83), (227, 51), (227, 53), (227, 54), (227, 55), (227, 56), (227, 57), (227, 58), (227, 59), (227, 60), (227, 61), (227, 62), (227, 63), (227, 64),
(227, 65), (227, 66), (227, 67), (227, 68), (227, 69), (227, 70), (227, 71), (227, 72), (227, 73), (227, 74), (227, 75), (227, 76), (227, 77), (227, 78), (227, 79), (227, 80), (227, 81), (227, 83), (228, 51), (228, 52), (228, 53), (228, 54), (228, 55), (228, 56), (228, 57), (228, 58), (228, 59), (228, 60), (228, 61), (228, 62), (228, 63), (228, 64), (228, 65), (228, 66), (228, 67), (228, 68), (228, 69), (228, 70), (228, 71), (228, 72), (228, 73), (228, 74), (228, 75), (228, 76), (228, 77), (228, 78), (228, 79), (228, 80), (228, 81), (228, 82), (228, 84), (229, 52), (229, 54), (229, 55), (229, 56), (229, 57), (229, 58), (229, 59), (229, 60), (229, 61), (229, 62), (229, 63), (229, 64), (229, 65), (229, 66), (229, 67), (229, 68), (229, 69), (229, 70), (229, 71), (229, 72), (229, 73),
(229, 74), (229, 75), (229, 76), (229, 77), (229, 78), (229, 79), (229, 80), (229, 81), (229, 82), (229, 84), (230, 52), (230, 54), (230, 55), (230, 56), (230, 57), (230, 58), (230, 59), (230, 60), (230, 61), (230, 62), (230, 63), (230, 64), (230, 65), (230, 66), (230, 67), (230, 68), (230, 69), (230, 70), (230, 71), (230, 72), (230, 73), (230, 74), (230, 75), (230, 76), (230, 77), (230, 78), (230, 79), (230, 80), (230, 81), (230, 82), (230, 84), (231, 52), (231, 54), (231, 55), (231, 56), (231, 57), (231, 58), (231, 59), (231, 60), (231, 61), (231, 62), (231, 63), (231, 64), (231, 65), (231, 66), (231, 67), (231, 68), (231, 69), (231, 70), (231, 71), (231, 72), (231, 73), (231, 74), (231, 75), (231, 76), (231, 77), (231, 78), (231, 79), (231, 80), (231, 81), (231, 82), (231, 84),
(232, 53), (232, 55), (232, 56), (232, 57), (232, 58), (232, 59), (232, 60), (232, 61), (232, 62), (232, 63), (232, 64), (232, 65), (232, 66), (232, 67), (232, 68), (232, 69), (232, 70), (232, 71), (232, 72), (232, 73), (232, 74), (232, 75), (232, 76), (232, 77), (232, 78), (232, 79), (232, 80), (232, 81), (232, 82), (232, 84), (233, 53), (233, 55), (233, 56), (233, 57), (233, 58), (233, 59), (233, 60), (233, 61), (233, 62), (233, 63), (233, 64), (233, 65), (233, 66), (233, 67), (233, 68), (233, 69), (233, 70), (233, 77), (233, 78), (233, 79), (233, 80), (233, 81), (233, 82), (233, 84), (234, 54), (234, 56), (234, 57), (234, 58), (234, 59), (234, 60), (234, 61), (234, 62), (234, 63), (234, 64), (234, 65), (234, 66), (234, 67), (234, 68), (234, 69), (234, 72), (234, 73), (234, 74),
(234, 75), (234, 76), (234, 79), (234, 80), (234, 81), (234, 82), (234, 84), (235, 54), (235, 56), (235, 57), (235, 58), (235, 59), (235, 60), (235, 61), (235, 62), (235, 63), (235, 64), (235, 65), (235, 66), (235, 67), (235, 68), (235, 70), (235, 77), (235, 80), (235, 81), (235, 82), (235, 83), (236, 55), (236, 57), (236, 58), (236, 59), (236, 60), (236, 61), (236, 62), (236, 63), (236, 64), (236, 65), (236, 66), (236, 67), (236, 69), (236, 79), (236, 83), (237, 56), (237, 58), (237, 59), (237, 60), (237, 61), (237, 62), (237, 63), (237, 64), (237, 65), (237, 66), (237, 67), (237, 69), (237, 80), (237, 83), (238, 56), (238, 58), (238, 59), (238, 60), (238, 61), (238, 62), (238, 63), (238, 64), (238, 65), (238, 66), (238, 68), (238, 82), (239, 56), (239, 58), (239, 59), (239, 60),
(239, 61), (239, 62), (239, 63), (239, 64), (239, 65), (239, 66), (239, 68), (240, 57), (240, 59), (240, 60), (240, 61), (240, 62), (240, 63), (240, 64), (240, 65), (240, 66), (240, 68), (241, 57), (241, 59), (241, 60), (241, 61), (241, 62), (241, 63), (241, 64), (241, 65), (241, 66), (241, 68), (242, 57), (242, 59), (242, 60), (242, 61), (242, 62), (242, 63), (242, 64), (242, 65), (242, 66), (242, 68), (243, 60), (243, 61), (243, 62), (243, 63), (243, 64), (243, 65), (243, 66), (243, 68), (244, 58), (244, 60), (244, 61), (244, 62), (244, 63), (244, 64), (244, 65), (244, 66), (244, 68), (245, 60), (245, 62), (245, 63), (245, 64), (245, 65), (245, 66), (245, 68), (246, 60), (246, 62), (246, 63), (246, 64), (246, 65), (246, 66), (246, 68), (247, 61), (247, 63), (247, 64), (247, 65),
(247, 66), (247, 68), (248, 61), (248, 63), (248, 64), (248, 65), (248, 66), (248, 68), (249, 61), (249, 63), (249, 64), (249, 65), (249, 66), (249, 68), (250, 61), (250, 63), (250, 64), (250, 68), (251, 61), (251, 66), (252, 61), (252, 64), (253, 61), (253, 62), )
coordinates_CCCCFF = ((112, 73),
(112, 75), (113, 72), (113, 76), (114, 59), (114, 60), (114, 61), (114, 62), (114, 63), (114, 64), (114, 65), (114, 66), (114, 67), (114, 68), (114, 69), (114, 70), (114, 71), (114, 73), (114, 74), (114, 75), (114, 77), (115, 57), (115, 59), (115, 60), (115, 61), (115, 62), (115, 63), (115, 64), (115, 65), (115, 66), (115, 67), (115, 68), (115, 69), (115, 70), (115, 72), (115, 73), (115, 74), (115, 75), (115, 77), (116, 55), (116, 59), (116, 60), (116, 61), (116, 62), (116, 63), (116, 64), (116, 65), (116, 66), (116, 67), (116, 68), (116, 69), (116, 70), (116, 71), (116, 72), (116, 73), (116, 74), (116, 75), (116, 77), (117, 53), (117, 57), (117, 58), (117, 59), (117, 60), (117, 61), (117, 62), (117, 63), (117, 64), (117, 65), (117, 66), (117, 67), (117, 68), (117, 69), (117, 70),
(117, 71), (117, 72), (117, 73), (117, 74), (117, 75), (117, 77), (118, 52), (118, 55), (118, 56), (118, 57), (118, 58), (118, 59), (118, 60), (118, 61), (118, 62), (118, 63), (118, 64), (118, 65), (118, 66), (118, 67), (118, 68), (118, 69), (118, 70), (118, 71), (118, 72), (118, 73), (118, 74), (118, 75), (118, 76), (118, 78), (119, 51), (119, 53), (119, 54), (119, 55), (119, 56), (119, 57), (119, 58), (119, 59), (119, 60), (119, 61), (119, 62), (119, 63), (119, 64), (119, 65), (119, 66), (119, 67), (119, 68), (119, 69), (119, 70), (119, 71), (119, 72), (119, 73), (119, 74), (119, 75), (119, 76), (119, 78), (120, 50), (120, 52), (120, 53), (120, 54), (120, 55), (120, 56), (120, 57), (120, 58), (120, 59), (120, 60), (120, 61), (120, 62), (120, 63), (120, 64), (120, 65), (120, 66),
(120, 67), (120, 68), (120, 69), (120, 70), (120, 71), (120, 72), (120, 73), (120, 74), (120, 75), (120, 76), (120, 78), (121, 49), (121, 51), (121, 52), (121, 53), (121, 54), (121, 55), (121, 56), (121, 57), (121, 58), (121, 59), (121, 60), (121, 61), (121, 62), (121, 63), (121, 64), (121, 65), (121, 66), (121, 67), (121, 68), (121, 69), (121, 70), (121, 71), (121, 72), (121, 73), (121, 74), (121, 75), (121, 76), (121, 78), (122, 48), (122, 50), (122, 51), (122, 52), (122, 53), (122, 54), (122, 55), (122, 56), (122, 57), (122, 58), (122, 59), (122, 60), (122, 61), (122, 62), (122, 63), (122, 64), (122, 65), (122, 66), (122, 67), (122, 68), (122, 69), (122, 70), (122, 71), (122, 72), (122, 73), (122, 74), (122, 75), (122, 76), (122, 77), (122, 79), (123, 47), (123, 49), (123, 50),
(123, 51), (123, 52), (123, 53), (123, 54), (123, 55), (123, 56), (123, 57), (123, 58), (123, 59), (123, 60), (123, 61), (123, 62), (123, 63), (123, 64), (123, 65), (123, 66), (123, 67), (123, 68), (123, 69), (123, 70), (123, 71), (123, 72), (123, 73), (123, 74), (123, 75), (123, 76), (123, 77), (123, 79), (124, 46), (124, 48), (124, 49), (124, 50), (124, 51), (124, 52), (124, 53), (124, 54), (124, 55), (124, 56), (124, 57), (124, 58), (124, 59), (124, 60), (124, 61), (124, 62), (124, 63), (124, 64), (124, 65), (124, 66), (124, 67), (124, 68), (124, 69), (124, 70), (124, 71), (124, 72), (124, 73), (124, 74), (124, 75), (124, 76), (124, 77), (124, 78), (124, 80), (125, 46), (125, 48), (125, 49), (125, 50), (125, 51), (125, 52), (125, 53), (125, 54), (125, 55), (125, 56), (125, 57),
(125, 58), (125, 59), (125, 60), (125, 61), (125, 62), (125, 63), (125, 64), (125, 65), (125, 66), (125, 67), (125, 68), (125, 69), (125, 70), (125, 71), (125, 72), (125, 73), (125, 74), (125, 75), (125, 76), (125, 77), (125, 78), (125, 79), (125, 81), (126, 46), (126, 48), (126, 49), (126, 50), (126, 51), (126, 52), (126, 53), (126, 54), (126, 55), (126, 56), (126, 57), (126, 58), (126, 59), (126, 60), (126, 61), (126, 62), (126, 63), (126, 64), (126, 65), (126, 66), (126, 67), (126, 68), (126, 69), (126, 70), (126, 71), (126, 72), (126, 73), (126, 74), (126, 75), (126, 76), (126, 77), (126, 78), (126, 79), (126, 82), (127, 45), (127, 47), (127, 48), (127, 49), (127, 50), (127, 51), (127, 52), (127, 53), (127, 54), (127, 55), (127, 56), (127, 57), (127, 58), (127, 59), (127, 60),
(127, 61), (127, 62), (127, 63), (127, 64), (127, 65), (127, 66), (127, 67), (127, 68), (127, 69), (127, 70), (127, 71), (127, 72), (127, 73), (127, 74), (127, 75), (127, 76), (127, 77), (127, 78), (127, 79), (127, 80), (127, 83), (128, 45), (128, 47), (128, 48), (128, 49), (128, 50), (128, 51), (128, 52), (128, 53), (128, 54), (128, 55), (128, 56), (128, 57), (128, 58), (128, 59), (128, 60), (128, 61), (128, 62), (128, 63), (128, 64), (128, 65), (128, 66), (128, 67), (128, 68), (128, 69), (128, 70), (128, 71), (128, 72), (128, 73), (128, 74), (128, 75), (128, 76), (128, 77), (128, 78), (128, 79), (128, 80), (128, 81), (128, 84), (128, 85), (129, 46), (129, 48), (129, 49), (129, 50), (129, 51), (129, 52), (129, 53), (129, 54), (129, 55), (129, 56), (129, 57), (129, 58), (129, 59),
(129, 60), (129, 61), (129, 62), (129, 63), (129, 64), (129, 65), (129, 66), (129, 67), (129, 68), (129, 69), (129, 70), (129, 71), (129, 72), (129, 73), (129, 74), (129, 75), (129, 76), (129, 77), (129, 78), (129, 79), (129, 80), (129, 81), (129, 82), (129, 83), (129, 86), (129, 87), (129, 88), (129, 90), (130, 47), (130, 50), (130, 51), (130, 52), (130, 53), (130, 54), (130, 55), (130, 56), (130, 57), (130, 58), (130, 59), (130, 60), (130, 61), (130, 62), (130, 63), (130, 64), (130, 65), (130, 66), (130, 67), (130, 68), (130, 69), (130, 70), (130, 71), (130, 72), (130, 73), (130, 74), (130, 75), (130, 76), (130, 77), (130, 78), (130, 79), (130, 80), (130, 81), (130, 82), (130, 83), (130, 84), (130, 85), (130, 90), (131, 48), (131, 51), (131, 52), (131, 53), (131, 54), (131, 55),
(131, 56), (131, 57), (131, 58), (131, 59), (131, 60), (131, 61), (131, 62), (131, 63), (131, 64), (131, 65), (131, 66), (131, 67), (131, 68), (131, 69), (131, 70), (131, 71), (131, 72), (131, 73), (131, 74), (131, 75), (131, 76), (131, 77), (131, 78), (131, 79), (131, 80), (131, 81), (131, 82), (131, 83), (131, 84), (131, 85), (131, 86), (131, 87), (131, 88), (131, 90), (132, 50), (132, 53), (132, 54), (132, 55), (132, 56), (132, 57), (132, 58), (132, 59), (132, 60), (132, 63), (132, 64), (132, 65), (132, 66), (132, 67), (132, 68), (132, 69), (132, 70), (132, 71), (132, 72), (132, 73), (132, 74), (132, 75), (132, 76), (132, 77), (132, 78), (132, 79), (132, 80), (132, 81), (132, 82), (132, 83), (132, 84), (132, 85), (132, 86), (132, 87), (132, 89), (133, 51), (133, 52), (133, 55),
(133, 56), (133, 57), (133, 58), (133, 59), (133, 62), (133, 65), (133, 66), (133, 67), (133, 68), (133, 69), (133, 70), (133, 71), (133, 72), (133, 73), (133, 74), (133, 75), (133, 76), (133, 77), (133, 78), (133, 79), (133, 80), (133, 81), (133, 82), (133, 83), (133, 84), (133, 85), (133, 86), (133, 87), (133, 89), (134, 53), (134, 54), (134, 58), (134, 59), (134, 60), (134, 63), (134, 64), (134, 67), (134, 68), (134, 69), (134, 70), (134, 71), (134, 72), (134, 73), (134, 74), (134, 75), (134, 76), (134, 77), (134, 78), (134, 79), (134, 80), (134, 81), (134, 82), (134, 83), (134, 84), (134, 85), (134, 86), (134, 87), (134, 89), (135, 55), (135, 56), (135, 57), (135, 59), (135, 65), (135, 68), (135, 69), (135, 70), (135, 71), (135, 72), (135, 73), (135, 74), (135, 75), (135, 76),
(135, 77), (135, 78), (135, 79), (135, 80), (135, 81), (135, 82), (135, 83), (135, 84), (135, 85), (135, 86), (135, 87), (135, 89), (136, 59), (136, 67), (136, 70), (136, 71), (136, 72), (136, 73), (136, 74), (136, 75), (136, 76), (136, 77), (136, 78), (136, 79), (136, 80), (136, 81), (136, 82), (136, 83), (136, 84), (136, 85), (136, 86), (136, 87), (136, 89), (137, 68), (137, 71), (137, 72), (137, 73), (137, 74), (137, 75), (137, 76), (137, 77), (137, 78), (137, 79), (137, 80), (137, 81), (137, 82), (137, 83), (137, 84), (137, 85), (137, 86), (137, 87), (137, 89), (138, 70), (138, 72), (138, 73), (138, 74), (138, 75), (138, 76), (138, 77), (138, 78), (138, 79), (138, 80), (138, 81), (138, 82), (138, 83), (138, 84), (138, 85), (138, 86), (138, 87), (138, 89), (139, 71), (139, 73),
(139, 74), (139, 75), (139, 76), (139, 77), (139, 78), (139, 79), (139, 80), (139, 81), (139, 82), (139, 83), (139, 84), (139, 85), (139, 86), (139, 87), (139, 89), (140, 72), (140, 75), (140, 76), (140, 77), (140, 78), (140, 79), (140, 80), (140, 81), (140, 82), (140, 83), (140, 84), (140, 85), (140, 86), (140, 87), (140, 89), (141, 73), (141, 76), (141, 77), (141, 78), (141, 79), (141, 80), (141, 81), (141, 82), (141, 83), (141, 84), (141, 85), (141, 86), (141, 87), (141, 89), (142, 77), (142, 78), (142, 79), (142, 80), (142, 81), (142, 82), (142, 83), (142, 84), (142, 85), (142, 86), (142, 87), (142, 88), (142, 90), (143, 76), (143, 78), (143, 79), (143, 80), (143, 81), (143, 82), (143, 83), (143, 84), (143, 85), (143, 86), (143, 87), (143, 88), (143, 90), (144, 77), (144, 79),
(144, 80), (144, 81), (144, 82), (144, 83), (144, 84), (144, 85), (144, 86), (144, 87), (144, 88), (144, 90), (145, 78), (145, 80), (145, 81), (145, 82), (145, 83), (145, 84), (145, 85), (145, 86), (145, 87), (145, 88), (145, 89), (145, 91), (146, 79), (146, 82), (146, 83), (146, 84), (146, 85), (146, 86), (146, 87), (146, 88), (146, 89), (146, 91), (147, 80), (147, 83), (147, 84), (147, 85), (147, 86), (147, 87), (147, 88), (147, 93), (148, 81), (148, 84), (148, 85), (148, 86), (148, 87), (148, 90), (148, 92), (149, 83), (149, 85), (149, 86), (149, 88), (150, 84), (150, 87), (151, 85), (151, 86), (296, 80), (296, 81), (296, 82), (296, 83), (296, 84), (296, 85), (296, 87), (297, 79), (297, 89), (298, 78), (298, 81), (298, 82), (298, 83), (298, 84), (298, 85), (298, 86), (298, 87),
(298, 90), (299, 77), (299, 80), (299, 81), (299, 82), (299, 83), (299, 84), (299, 85), (299, 86), (299, 88), (300, 76), (300, 79), (300, 80), (300, 81), (300, 82), (300, 83), (300, 84), (300, 85), (300, 87), (301, 75), (301, 78), (301, 79), (301, 80), (301, 81), (301, 82), (301, 83), (301, 84), (301, 86), (302, 74), (302, 77), (302, 78), (302, 79), (302, 80), (302, 81), (302, 82), (302, 83), (302, 84), (302, 86), (303, 76), (303, 77), (303, 78), (303, 79), (303, 80), (303, 81), (303, 82), (303, 83), (303, 85), (304, 75), (304, 76), (304, 77), (304, 78), (304, 79), (304, 80), (304, 81), (304, 82), (304, 83), (304, 85), (305, 74), (305, 75), (305, 76), (305, 77), (305, 78), (305, 79), (305, 80), (305, 81), (305, 82), (305, 84), (306, 73), (306, 74), (306, 75), (306, 76), (306, 77),
(306, 78), (306, 79), (306, 80), (306, 84), (307, 69), (307, 72), (307, 73), (307, 74), (307, 75), (307, 76), (307, 77), (307, 78), (307, 79), (307, 82), (308, 68), (308, 71), (308, 72), (308, 73), (308, 74), (308, 75), (308, 76), (308, 77), (308, 78), (308, 80), (309, 56), (309, 67), (309, 70), (309, 71), (309, 72), (309, 73), (309, 74), (309, 75), (309, 76), (309, 77), (309, 79), (310, 55), (310, 56), (310, 66), (310, 69), (310, 70), (310, 71), (310, 72), (310, 73), (310, 74), (310, 75), (310, 76), (310, 78), (311, 54), (311, 56), (311, 65), (311, 67), (311, 68), (311, 69), (311, 70), (311, 71), (311, 72), (311, 73), (311, 74), (311, 75), (311, 76), (311, 78), (312, 53), (312, 55), (312, 56), (312, 63), (312, 66), (312, 67), (312, 68), (312, 69), (312, 70), (312, 71), (312, 72),
(312, 73), (312, 74), (312, 75), (312, 77), (313, 52), (313, 54), (313, 55), (313, 56), (313, 58), (313, 61), (313, 62), (313, 65), (313, 66), (313, 67), (313, 68), (313, 69), (313, 70), (313, 71), (313, 72), (313, 73), (313, 74), (313, 75), (313, 77), (314, 51), (314, 53), (314, 54), (314, 55), (314, 56), (314, 59), (314, 60), (314, 63), (314, 64), (314, 65), (314, 66), (314, 67), (314, 68), (314, 69), (314, 70), (314, 71), (314, 72), (314, 73), (314, 74), (314, 76), (315, 50), (315, 52), (315, 53), (315, 54), (315, 55), (315, 56), (315, 57), (315, 58), (315, 61), (315, 62), (315, 63), (315, 64), (315, 65), (315, 66), (315, 67), (315, 68), (315, 69), (315, 70), (315, 71), (315, 72), (315, 73), (315, 74), (315, 76), (316, 49), (316, 51), (316, 52), (316, 53), (316, 54), (316, 55),
(316, 56), (316, 57), (316, 58), (316, 59), (316, 60), (316, 61), (316, 62), (316, 63), (316, 64), (316, 65), (316, 66), (316, 67), (316, 68), (316, 69), (316, 70), (316, 71), (316, 72), (316, 73), (316, 74), (316, 76), (317, 48), (317, 50), (317, 51), (317, 52), (317, 53), (317, 54), (317, 55), (317, 56), (317, 57), (317, 58), (317, 59), (317, 60), (317, 61), (317, 62), (317, 63), (317, 64), (317, 65), (317, 66), (317, 67), (317, 68), (317, 69), (317, 70), (317, 71), (317, 72), (317, 73), (317, 75), (318, 47), (318, 49), (318, 50), (318, 51), (318, 52), (318, 53), (318, 54), (318, 55), (318, 56), (318, 57), (318, 58), (318, 59), (318, 60), (318, 61), (318, 62), (318, 63), (318, 64), (318, 65), (318, 66), (318, 67), (318, 68), (318, 69), (318, 70), (318, 71), (318, 72), (318, 73),
(318, 75), (319, 46), (319, 48), (319, 49), (319, 50), (319, 51), (319, 52), (319, 53), (319, 54), (319, 55), (319, 56), (319, 57), (319, 58), (319, 59), (319, 60), (319, 61), (319, 62), (319, 63), (319, 64), (319, 65), (319, 66), (319, 67), (319, 68), (319, 69), (319, 70), (319, 71), (319, 72), (319, 73), (319, 75), (320, 45), (320, 50), (320, 51), (320, 52), (320, 53), (320, 54), (320, 55), (320, 56), (320, 57), (320, 58), (320, 59), (320, 60), (320, 61), (320, 62), (320, 63), (320, 64), (320, 65), (320, 66), (320, 67), (320, 68), (320, 69), (320, 70), (320, 71), (320, 72), (320, 73), (320, 75), (321, 46), (321, 48), (321, 49), (321, 52), (321, 53), (321, 54), (321, 55), (321, 56), (321, 57), (321, 58), (321, 59), (321, 60), (321, 61), (321, 62), (321, 63), (321, 64), (321, 65),
(321, 66), (321, 67), (321, 68), (321, 69), (321, 70), (321, 71), (321, 72), (321, 73), (321, 75), (322, 50), (322, 53), (322, 54), (322, 55), (322, 56), (322, 57), (322, 58), (322, 59), (322, 60), (322, 61), (322, 62), (322, 63), (322, 64), (322, 65), (322, 66), (322, 67), (322, 68), (322, 69), (322, 70), (322, 71), (322, 72), (322, 74), (323, 52), (323, 54), (323, 55), (323, 56), (323, 57), (323, 58), (323, 59), (323, 60), (323, 61), (323, 62), (323, 63), (323, 64), (323, 65), (323, 66), (323, 67), (323, 68), (323, 69), (323, 70), (323, 71), (323, 72), (323, 74), (324, 53), (324, 56), (324, 57), (324, 58), (324, 59), (324, 60), (324, 61), (324, 62), (324, 63), (324, 64), (324, 65), (324, 66), (324, 67), (324, 68), (324, 69), (324, 70), (324, 71), (324, 72), (324, 74), (325, 54),
(325, 57), (325, 58), (325, 59), (325, 60), (325, 61), (325, 62), (325, 63), (325, 64), (325, 65), (325, 66), (325, 67), (325, 68), (325, 69), (325, 70), (325, 71), (325, 72), (325, 74), (326, 56), (326, 58), (326, 59), (326, 60), (326, 61), (326, 62), (326, 63), (326, 64), (326, 65), (326, 66), (326, 67), (326, 68), (326, 69), (326, 70), (326, 71), (326, 72), (326, 74), (327, 57), (327, 60), (327, 61), (327, 62), (327, 63), (327, 64), (327, 65), (327, 66), (327, 67), (327, 68), (327, 69), (327, 70), (327, 71), (327, 72), (327, 74), (328, 58), (328, 61), (328, 62), (328, 63), (328, 64), (328, 65), (328, 66), (328, 67), (328, 68), (328, 69), (328, 70), (328, 71), (328, 74), (329, 60), (329, 63), (329, 64), (329, 65), (329, 66), (329, 67), (329, 68), (329, 69), (329, 72), (330, 61),
(330, 71), (331, 64), (331, 65), (331, 66), (331, 67), (331, 69), )
coordinates_FFFF00 = ((195, 67),
(195, 69), (195, 70), (195, 71), (195, 72), (195, 74), (196, 67), (196, 73), (197, 68), (197, 70), (197, 72), (198, 69), (198, 71), (199, 69), (199, 71), (200, 70), (200, 71), (201, 70), (201, 72), (202, 70), (202, 73), (203, 70), (203, 72), (203, 75), (204, 69), (204, 71), (204, 72), (204, 73), (204, 77), (205, 68), (205, 70), (205, 71), (205, 72), (205, 73), (205, 74), (205, 75), (205, 79), (206, 67), (206, 69), (206, 70), (206, 71), (206, 72), (206, 73), (206, 74), (206, 75), (206, 76), (206, 77), (206, 80), (207, 67), (207, 80), (208, 69), (208, 71), (208, 72), (208, 73), (208, 74), (208, 75), (208, 76), (208, 77), (208, 78), (208, 80), (236, 72), (236, 74), (236, 75), (236, 76), (237, 71), (237, 77), (238, 71), (238, 73), (238, 74), (238, 75), (238, 76), (238, 79), (239, 70),
(239, 72), (239, 73), (239, 74), (239, 75), (239, 76), (239, 77), (239, 80), (240, 70), (240, 72), (240, 73), (240, 74), (240, 75), (240, 76), (240, 77), (240, 78), (240, 80), (241, 70), (241, 72), (241, 73), (241, 74), (241, 75), (241, 76), (241, 77), (241, 78), (241, 80), (242, 70), (242, 72), (242, 73), (242, 74), (242, 75), (242, 76), (242, 77), (242, 78), (243, 70), (243, 72), (243, 73), (243, 74), (243, 75), (243, 76), (243, 77), (243, 79), (244, 70), (244, 72), (244, 73), (244, 74), (244, 75), (244, 76), (244, 78), (245, 70), (245, 72), (245, 73), (245, 74), (245, 75), (245, 76), (245, 78), (246, 70), (246, 72), (246, 73), (246, 74), (246, 75), (246, 77), (247, 70), (247, 72), (247, 73), (247, 74), (247, 75), (247, 77), (248, 70), (248, 77), (249, 70), (249, 72), (249, 73),
(249, 74), (249, 75), (249, 76), (250, 70), )
coordinates_33FFFF = ((189, 70),
(189, 72), (189, 73), (189, 74), (189, 76), (190, 77), (191, 71), (191, 73), (191, 74), (191, 75), (191, 77), (192, 72), (192, 76), (192, 78), (193, 73), (193, 75), (193, 76), (193, 77), (193, 79), (194, 76), (194, 78), (194, 80), (195, 76), (195, 78), (195, 79), (195, 81), (196, 75), (196, 77), (196, 78), (196, 79), (196, 80), (196, 82), (197, 74), (197, 76), (197, 77), (197, 78), (197, 79), (197, 80), (197, 81), (197, 83), (198, 73), (198, 76), (198, 77), (198, 78), (198, 79), (198, 80), (198, 81), (198, 82), (198, 84), (199, 73), (199, 75), (199, 76), (199, 77), (199, 78), (199, 79), (199, 80), (199, 81), (199, 82), (199, 83), (199, 85), (200, 74), (200, 77), (200, 78), (200, 79), (200, 80), (200, 81), (200, 82), (200, 83), (200, 84), (200, 86), (201, 75), (201, 79), (201, 80),
(201, 81), (201, 82), (201, 83), (201, 84), (201, 85), (201, 87), (202, 77), (202, 80), (202, 81), (202, 82), (202, 83), (202, 84), (202, 85), (202, 86), (202, 88), (203, 79), (203, 81), (203, 82), (203, 83), (203, 84), (203, 85), (203, 86), (204, 80), (204, 82), (204, 83), (204, 84), (204, 85), (204, 86), (204, 87), (204, 89), (205, 81), (205, 83), (205, 84), (205, 85), (205, 86), (205, 87), (205, 88), (205, 90), (206, 82), (206, 84), (206, 85), (206, 86), (206, 87), (206, 88), (206, 90), (207, 82), (207, 84), (207, 85), (207, 86), (207, 87), (207, 88), (207, 89), (207, 91), (208, 83), (208, 85), (208, 86), (208, 87), (208, 88), (208, 89), (208, 91), (209, 84), (209, 86), (209, 87), (209, 88), (209, 89), (209, 91), (210, 85), (210, 87), (210, 88), (210, 89), (210, 91), (211, 85),
(211, 87), (211, 88), (211, 89), (211, 91), (212, 86), (212, 88), (212, 89), (212, 90), (212, 92), (213, 86), (213, 88), (213, 89), (213, 90), (213, 92), (214, 86), (214, 88), (214, 89), (214, 90), (214, 92), (215, 86), (215, 88), (215, 89), (215, 90), (215, 92), (216, 86), (216, 88), (216, 89), (216, 90), (216, 91), (216, 93), (217, 86), (217, 88), (217, 89), (217, 90), (217, 91), (217, 93), (218, 86), (218, 88), (218, 89), (218, 90), (218, 91), (218, 92), (218, 94), (219, 85), (219, 87), (219, 88), (219, 89), (219, 90), (219, 91), (219, 92), (219, 93), (220, 85), (220, 87), (220, 88), (220, 89), (220, 90), (220, 91), (220, 92), (220, 93), (220, 94), (221, 85), (221, 86), (221, 87), (221, 88), (221, 89), (221, 90), (221, 91), (221, 92), (221, 93), (221, 94), (221, 95), (221, 98),
(222, 83), (222, 85), (222, 86), (222, 87), (222, 88), (222, 89), (222, 90), (222, 91), (222, 92), (222, 93), (222, 94), (222, 95), (222, 96), (222, 99), (223, 83), (223, 85), (223, 86), (223, 87), (223, 88), (223, 89), (223, 90), (223, 91), (223, 92), (223, 93), (223, 94), (223, 95), (223, 96), (223, 98), (224, 84), (224, 86), (224, 87), (224, 88), (224, 89), (224, 90), (224, 91), (224, 92), (224, 93), (224, 94), (224, 95), (224, 97), (225, 85), (225, 87), (225, 88), (225, 89), (225, 90), (225, 91), (225, 92), (225, 93), (225, 94), (225, 96), (226, 85), (226, 87), (226, 88), (226, 89), (226, 90), (226, 91), (226, 92), (226, 93), (226, 95), (227, 86), (227, 88), (227, 89), (227, 90), (227, 91), (227, 92), (227, 94), (228, 86), (228, 88), (228, 89), (228, 90), (228, 91), (228, 93),
(229, 86), (229, 88), (229, 89), (229, 90), (229, 91), (229, 93), (230, 86), (230, 88), (230, 89), (230, 90), (230, 92), (231, 86), (231, 88), (231, 89), (231, 90), (231, 92), (232, 86), (232, 88), (232, 89), (232, 90), (232, 92), (233, 86), (233, 88), (233, 89), (233, 90), (233, 92), (234, 86), (234, 88), (234, 89), (234, 90), (234, 92), (235, 86), (235, 88), (235, 89), (235, 91), (236, 85), (236, 86), (236, 87), (236, 88), (236, 89), (236, 91), (237, 85), (237, 87), (237, 88), (237, 89), (237, 91), (238, 84), (238, 86), (238, 87), (238, 88), (238, 89), (238, 91), (239, 83), (239, 85), (239, 86), (239, 87), (239, 88), (239, 89), (239, 91), (240, 82), (240, 85), (240, 86), (240, 87), (240, 88), (240, 90), (241, 82), (241, 84), (241, 85), (241, 86), (241, 87), (241, 88), (241, 90),
(242, 82), (242, 84), (242, 85), (242, 86), (242, 87), (242, 88), (242, 90), (243, 81), (243, 83), (243, 84), (243, 85), (243, 86), (243, 87), (243, 88), (243, 90), (244, 81), (244, 83), (244, 84), (244, 85), (244, 86), (244, 87), (244, 89), (245, 80), (245, 82), (245, 83), (245, 84), (245, 85), (245, 86), (245, 88), (246, 80), (246, 82), (246, 83), (246, 84), (246, 85), (247, 80), (247, 82), (247, 83), (247, 84), (247, 86), (248, 79), (248, 81), (248, 82), (248, 83), (248, 85), (249, 78), (249, 80), (249, 81), (249, 82), (249, 83), (249, 85), (250, 77), (250, 79), (250, 80), (250, 81), (250, 82), (250, 84), (251, 76), (251, 78), (251, 79), (251, 80), (251, 81), (251, 82), (251, 84), (252, 73), (252, 75), (252, 77), (252, 78), (252, 79), (252, 80), (252, 81), (252, 82), (252, 84),
(253, 73), (253, 76), (253, 77), (253, 78), (253, 79), (253, 80), (253, 81), (254, 73), (254, 75), (254, 76), (254, 83), (255, 72), (255, 76), (255, 77), (255, 78), (255, 79), (255, 80), (255, 82), (256, 72), (256, 74), (256, 75), (256, 76), (256, 81), )
coordinates_FF00FF = ((168, 39),
(168, 41), (168, 42), (168, 43), (168, 44), (168, 45), (168, 46), (168, 47), (168, 48), (168, 49), (168, 50), (168, 51), (168, 52), (168, 53), (168, 54), (168, 55), (168, 56), (168, 57), (168, 59), (169, 39), (169, 61), (170, 39), (170, 41), (170, 42), (170, 43), (170, 44), (170, 45), (170, 46), (170, 47), (170, 48), (170, 49), (170, 50), (170, 51), (170, 52), (170, 53), (170, 54), (170, 55), (170, 56), (170, 57), (170, 58), (170, 59), (170, 62), (171, 39), (171, 61), (172, 39), (172, 41), (172, 42), (172, 43), (172, 44), (172, 45), (172, 46), (172, 47), (172, 48), (172, 49), (172, 50), (172, 51), (172, 52), (172, 53), (172, 54), (172, 55), (172, 56), (172, 57), (172, 58), (172, 59), (272, 42), (272, 44), (272, 45), (272, 46), (272, 47), (272, 48), (272, 49), (272, 50), (272, 51),
(272, 52), (272, 53), (272, 54), (272, 55), (272, 56), (272, 57), (273, 42), (273, 44), (273, 58), (273, 59), (273, 61), (274, 42), (274, 44), (274, 45), (274, 46), (274, 47), (274, 48), (274, 49), (274, 50), (274, 51), (274, 52), (274, 53), (274, 54), (274, 55), (274, 56), (274, 57), (274, 62), (275, 42), (275, 44), (275, 45), (275, 46), (275, 47), (275, 48), (275, 49), (275, 50), (275, 51), (275, 52), (275, 53), (275, 54), (275, 55), (275, 56), (275, 61), (276, 42), (276, 44), (276, 45), (276, 46), (276, 47), (276, 48), (276, 49), (276, 50), (276, 51), (276, 52), (276, 57), (276, 59), (277, 42), (277, 44), (277, 45), (277, 46), (277, 47), (277, 53), (277, 54), (277, 55), (278, 42), (278, 47), (278, 48), (278, 49), (278, 50), (278, 51), (279, 42), (279, 44), (279, 45), (279, 46),
)
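# Illustrative sketch only: the groups above pair a hex colour name (e.g.
# FF00FF) with pixel coordinates, and the helper below shows one hedged way
# such data *could* be rendered.  The function name, the (x, y) reading of
# each pair, and the use of Pillow are assumptions, not original author code.
def render_coordinate_groups(groups, background="white"):
    """Plot each {hex_colour_name: coordinate_tuple} group on one RGB image."""
    from PIL import Image  # local import keeps this data module importable without Pillow

    # Size the canvas from the largest coordinate seen in any group.
    width = max(x for coords in groups.values() for x, _ in coords) + 1
    height = max(y for coords in groups.values() for _, y in coords) + 1
    image = Image.new("RGB", (width, height), background)
    for hex_name, coords in groups.items():
        # Convert the 6-digit hex name (e.g. "CCFF33") into an (R, G, B) tuple.
        rgb = tuple(int(hex_name[i:i + 2], 16) for i in (0, 2, 4))
        for x, y in coords:
            image.putpixel((x, y), rgb)
    return image

# Assumed usage example:
#     render_coordinate_groups({"FF00FF": coordinates_FF00FF}).save("preview.png")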
coordinates_CCFF99 = ((98, 114),
(98, 115), (98, 116), (98, 117), (98, 118), (98, 119), (98, 120), (98, 121), (98, 122), (98, 124), (99, 109), (99, 110), (99, 111), (99, 112), (99, 113), (99, 126), (100, 105), (100, 107), (100, 108), (100, 114), (100, 115), (100, 116), (100, 117), (100, 118), (100, 119), (100, 120), (100, 121), (100, 122), (100, 123), (100, 124), (100, 127), (101, 102), (101, 104), (101, 109), (101, 110), (101, 111), (101, 112), (101, 113), (101, 114), (101, 115), (101, 116), (101, 117), (101, 118), (101, 119), (101, 120), (101, 121), (101, 122), (101, 123), (101, 124), (101, 125), (101, 127), (102, 99), (102, 100), (102, 101), (102, 105), (102, 106), (102, 107), (102, 108), (102, 109), (102, 110), (102, 111), (102, 112), (102, 113), (102, 114), (102, 115), (102, 116), (102, 117), (102, 118), (102, 119), (102, 120), (102, 121), (102, 122), (102, 123),
(102, 124), (102, 125), (102, 127), (103, 97), (103, 102), (103, 103), (103, 104), (103, 105), (103, 106), (103, 107), (103, 108), (103, 109), (103, 110), (103, 111), (103, 112), (103, 113), (103, 114), (103, 115), (103, 116), (103, 117), (103, 118), (103, 119), (103, 120), (103, 121), (103, 122), (103, 123), (103, 124), (103, 125), (103, 127), (104, 95), (104, 99), (104, 100), (104, 101), (104, 102), (104, 103), (104, 104), (104, 105), (104, 106), (104, 107), (104, 108), (104, 109), (104, 110), (104, 111), (104, 112), (104, 113), (104, 114), (104, 115), (104, 116), (104, 117), (104, 118), (104, 119), (104, 120), (104, 121), (104, 122), (104, 123), (104, 124), (104, 125), (104, 127), (104, 128), (105, 93), (105, 97), (105, 98), (105, 99), (105, 100), (105, 101), (105, 102), (105, 103), (105, 104), (105, 105), (105, 106), (105, 107), (105, 108),
(105, 109), (105, 110), (105, 111), (105, 112), (105, 113), (105, 114), (105, 115), (105, 116), (105, 117), (105, 118), (105, 119), (105, 120), (105, 121), (105, 122), (105, 123), (105, 124), (105, 125), (105, 126), (105, 128), (106, 92), (106, 95), (106, 96), (106, 97), (106, 98), (106, 99), (106, 100), (106, 101), (106, 102), (106, 103), (106, 104), (106, 105), (106, 106), (106, 107), (106, 108), (106, 109), (106, 110), (106, 111), (106, 112), (106, 113), (106, 114), (106, 115), (106, 116), (106, 117), (106, 118), (106, 119), (106, 120), (106, 121), (106, 122), (106, 123), (106, 124), (106, 125), (106, 126), (106, 128), (107, 90), (107, 93), (107, 94), (107, 95), (107, 96), (107, 97), (107, 98), (107, 99), (107, 100), (107, 101), (107, 102), (107, 103), (107, 104), (107, 105), (107, 106), (107, 107), (107, 108), (107, 109), (107, 110),
(107, 111), (107, 112), (107, 113), (107, 114), (107, 115), (107, 116), (107, 117), (107, 118), (107, 119), (107, 120), (107, 121), (107, 122), (107, 123), (107, 124), (107, 125), (107, 126), (107, 127), (107, 128), (107, 129), (108, 89), (108, 92), (108, 93), (108, 94), (108, 95), (108, 96), (108, 97), (108, 98), (108, 99), (108, 100), (108, 101), (108, 102), (108, 103), (108, 104), (108, 105), (108, 106), (108, 107), (108, 108), (108, 109), (108, 110), (108, 111), (108, 112), (108, 113), (108, 114), (108, 115), (108, 116), (108, 117), (108, 118), (108, 119), (108, 120), (108, 121), (108, 122), (108, 123), (108, 124), (108, 125), (108, 126), (108, 127), (108, 129), (109, 88), (109, 90), (109, 91), (109, 92), (109, 93), (109, 94), (109, 95), (109, 96), (109, 97), (109, 98), (109, 99), (109, 100), (109, 101), (109, 102), (109, 103),
(109, 104), (109, 105), (109, 106), (109, 107), (109, 108), (109, 109), (109, 110), (109, 111), (109, 112), (109, 113), (109, 114), (109, 115), (109, 116), (109, 117), (109, 118), (109, 119), (109, 120), (109, 121), (109, 122), (109, 123), (109, 124), (109, 125), (109, 126), (109, 127), (109, 129), (110, 87), (110, 89), (110, 90), (110, 91), (110, 92), (110, 93), (110, 94), (110, 95), (110, 96), (110, 97), (110, 98), (110, 99), (110, 100), (110, 101), (110, 102), (110, 103), (110, 104), (110, 105), (110, 106), (110, 107), (110, 108), (110, 109), (110, 110), (110, 111), (110, 112), (110, 113), (110, 114), (110, 115), (110, 116), (110, 117), (110, 118), (110, 119), (110, 120), (110, 121), (110, 122), (110, 123), (110, 124), (110, 125), (110, 126), (110, 127), (110, 128), (110, 130), (111, 86), (111, 88), (111, 89), (111, 90), (111, 91),
(111, 92), (111, 93), (111, 94), (111, 95), (111, 96), (111, 97), (111, 98), (111, 99), (111, 100), (111, 101), (111, 102), (111, 103), (111, 104), (111, 105), (111, 106), (111, 107), (111, 108), (111, 109), (111, 110), (111, 111), (111, 112), (111, 113), (111, 114), (111, 115), (111, 116), (111, 117), (111, 118), (111, 119), (111, 120), (111, 121), (111, 122), (111, 123), (111, 124), (111, 125), (111, 126), (111, 127), (111, 128), (111, 129), (111, 130), (112, 86), (112, 88), (112, 89), (112, 90), (112, 91), (112, 92), (112, 93), (112, 94), (112, 95), (112, 96), (112, 97), (112, 98), (112, 99), (112, 100), (112, 101), (112, 102), (112, 103), (112, 104), (112, 105), (112, 106), (112, 107), (112, 108), (112, 109), (112, 110), (112, 111), (112, 112), (112, 113), (112, 114), (112, 115), (112, 116), (112, 117), (112, 118), (112, 119),
(112, 120), (112, 121), (112, 122), (112, 123), (112, 124), (112, 125), (112, 126), (112, 127), (112, 128), (112, 129), (112, 131), (113, 86), (113, 88), (113, 89), (113, 90), (113, 91), (113, 92), (113, 93), (113, 94), (113, 95), (113, 96), (113, 97), (113, 98), (113, 99), (113, 100), (113, 101), (113, 102), (113, 103), (113, 104), (113, 105), (113, 106), (113, 107), (113, 108), (113, 109), (113, 110), (113, 111), (113, 112), (113, 113), (113, 114), (113, 115), (113, 116), (113, 117), (113, 118), (113, 119), (113, 120), (113, 121), (113, 122), (113, 123), (113, 124), (113, 125), (113, 126), (113, 127), (113, 128), (113, 129), (113, 130), (113, 132), (114, 87), (114, 89), (114, 90), (114, 91), (114, 92), (114, 93), (114, 94), (114, 95), (114, 96), (114, 97), (114, 98), (114, 99), (114, 100), (114, 101), (114, 102), (114, 103),
(114, 104), (114, 105), (114, 106), (114, 107), (114, 108), (114, 109), (114, 110), (114, 111), (114, 112), (114, 113), (114, 114), (114, 115), (114, 116), (114, 117), (114, 118), (114, 119), (114, 120), (114, 121), (114, 122), (114, 123), (114, 124), (114, 125), (114, 126), (114, 127), (114, 128), (114, 129), (114, 130), (114, 132), (115, 88), (115, 90), (115, 91), (115, 92), (115, 93), (115, 94), (115, 95), (115, 96), (115, 97), (115, 98), (115, 99), (115, 100), (115, 101), (115, 102), (115, 103), (115, 104), (115, 105), (115, 106), (115, 107), (115, 108), (115, 109), (115, 110), (115, 111), (115, 112), (115, 113), (115, 114), (115, 115), (115, 116), (115, 117), (115, 118), (115, 119), (115, 120), (115, 121), (115, 122), (115, 123), (115, 124), (115, 125), (115, 126), (115, 127), (115, 128), (115, 129), (115, 130), (115, 131), (115, 133),
(116, 88), (116, 89), (116, 90), (116, 91), (116, 92), (116, 93), (116, 94), (116, 95), (116, 96), (116, 97), (116, 98), (116, 99), (116, 100), (116, 101), (116, 102), (116, 103), (116, 104), (116, 105), (116, 106), (116, 107), (116, 108), (116, 109), (116, 110), (116, 111), (116, 112), (116, 113), (116, 114), (116, 115), (116, 116), (116, 117), (116, 118), (116, 119), (116, 120), (116, 121), (116, 122), (116, 123), (116, 124), (116, 125), (116, 126), (116, 127), (116, 128), (116, 129), (116, 130), (116, 131), (116, 132), (116, 134), (117, 89), (117, 91), (117, 92), (117, 93), (117, 94), (117, 95), (117, 96), (117, 97), (117, 98), (117, 99), (117, 100), (117, 101), (117, 102), (117, 103), (117, 104), (117, 105), (117, 106), (117, 107), (117, 108), (117, 109), (117, 110), (117, 114), (117, 115), (117, 116), (117, 117), (117, 118),
(117, 119), (117, 120), (117, 121), (117, 122), (117, 123), (117, 124), (117, 125), (117, 126), (117, 127), (117, 128), (117, 129), (117, 130), (117, 131), (117, 132), (117, 133), (117, 135), (118, 89), (118, 91), (118, 92), (118, 93), (118, 94), (118, 95), (118, 96), (118, 97), (118, 98), (118, 99), (118, 100), (118, 101), (118, 102), (118, 103), (118, 104), (118, 105), (118, 106), (118, 107), (118, 108), (118, 109), (118, 111), (118, 112), (118, 113), (118, 115), (118, 116), (118, 117), (118, 118), (118, 119), (118, 120), (118, 121), (118, 122), (118, 123), (118, 124), (118, 125), (118, 126), (118, 127), (118, 128), (118, 129), (118, 130), (118, 131), (118, 132), (118, 133), (118, 134), (118, 136), (119, 90), (119, 92), (119, 93), (119, 94), (119, 95), (119, 96), (119, 97), (119, 98), (119, 99), (119, 100), (119, 101), (119, 102),
(119, 103), (119, 104), (119, 105), (119, 106), (119, 107), (119, 108), (119, 110), (119, 114), (119, 116), (119, 117), (119, 118), (119, 119), (119, 120), (119, 121), (119, 122), (119, 123), (119, 124), (119, 125), (119, 126), (119, 127), (119, 128), (119, 129), (119, 130), (119, 131), (119, 132), (119, 133), (119, 134), (119, 135), (119, 137), (120, 90), (120, 92), (120, 93), (120, 94), (120, 95), (120, 96), (120, 97), (120, 98), (120, 99), (120, 100), (120, 101), (120, 102), (120, 103), (120, 104), (120, 105), (120, 106), (120, 107), (120, 109), (120, 115), (120, 117), (120, 118), (120, 119), (120, 120), (120, 121), (120, 122), (120, 123), (120, 124), (120, 125), (120, 126), (120, 127), (120, 128), (120, 129), (120, 130), (120, 131), (120, 132), (120, 133), (120, 134), (120, 135), (120, 136), (120, 140), (121, 90), (121, 92), (121, 93),
(121, 94), (121, 95), (121, 96), (121, 97), (121, 98), (121, 99), (121, 100), (121, 101), (121, 102), (121, 103), (121, 104), (121, 105), (121, 106), (121, 108), (121, 116), (121, 118), (121, 119), (121, 120), (121, 121), (121, 122), (121, 123), (121, 124), (121, 125), (121, 126), (121, 127), (121, 128), (121, 129), (121, 130), (121, 131), (121, 132), (121, 133), (121, 134), (121, 135), (121, 136), (121, 137), (121, 140), (122, 91), (122, 93), (122, 94), (122, 95), (122, 96), (122, 97), (122, 98), (122, 99), (122, 100), (122, 101), (122, 102), (122, 103), (122, 104), (122, 105), (122, 107), (122, 116), (122, 118), (122, 119), (122, 120), (122, 121), (122, 122), (122, 123), (122, 124), (122, 125), (122, 126), (122, 127), (122, 128), (122, 129), (122, 130), (122, 131), (122, 132), (122, 133), (122, 134), (122, 135), (122, 136), (122, 137),
(122, 138), (122, 139), (122, 141), (123, 91), (123, 93), (123, 94), (123, 95), (123, 96), (123, 97), (123, 98), (123, 99), (123, 100), (123, 101), (123, 102), (123, 103), (123, 104), (123, 105), (123, 107), (123, 116), (123, 117), (123, 118), (123, 119), (123, 120), (123, 121), (123, 122), (123, 123), (123, 124), (123, 125), (123, 126), (123, 127), (123, 128), (123, 129), (123, 130), (123, 131), (123, 132), (123, 133), (123, 134), (123, 135), (123, 136), (123, 137), (123, 138), (123, 139), (123, 141), (124, 91), (124, 93), (124, 94), (124, 95), (124, 96), (124, 97), (124, 98), (124, 99), (124, 100), (124, 101), (124, 102), (124, 103), (124, 104), (124, 106), (124, 117), (124, 119), (124, 120), (124, 121), (124, 122), (124, 123), (124, 124), (124, 125), (124, 126), (124, 127), (124, 128), (124, 129), (124, 130), (124, 131), (124, 132),
(124, 133), (124, 134), (124, 135), (124, 136), (124, 137), (124, 138), (124, 139), (124, 140), (124, 142), (125, 91), (125, 92), (125, 93), (125, 94), (125, 95), (125, 96), (125, 97), (125, 98), (125, 99), (125, 100), (125, 101), (125, 102), (125, 103), (125, 104), (125, 105), (125, 106), (125, 117), (125, 119), (125, 120), (125, 121), (125, 122), (125, 123), (125, 124), (125, 125), (125, 126), (125, 127), (125, 128), (125, 129), (125, 130), (125, 131), (125, 132), (125, 133), (125, 134), (125, 135), (125, 136), (125, 137), (125, 138), (125, 139), (125, 140), (125, 141), (125, 143), (126, 92), (126, 94), (126, 95), (126, 96), (126, 97), (126, 98), (126, 99), (126, 100), (126, 101), (126, 102), (126, 103), (126, 105), (126, 117), (126, 119), (126, 120), (126, 121), (126, 122), (126, 123), (126, 124), (126, 125), (126, 126), (126, 127),
(126, 128), (126, 129), (126, 130), (126, 131), (126, 132), (126, 133), (126, 134), (126, 135), (126, 136), (126, 137), (126, 138), (126, 139), (126, 140), (126, 141), (126, 143), (127, 92), (127, 94), (127, 95), (127, 96), (127, 97), (127, 98), (127, 99), (127, 100), (127, 101), (127, 102), (127, 103), (127, 105), (127, 118), (127, 120), (127, 121), (127, 122), (127, 123), (127, 124), (127, 125), (127, 126), (127, 127), (127, 128), (127, 129), (127, 130), (127, 131), (127, 132), (127, 133), (127, 134), (127, 135), (127, 136), (127, 137), (127, 138), (127, 139), (127, 140), (127, 141), (127, 142), (127, 144), (128, 92), (128, 94), (128, 95), (128, 96), (128, 97), (128, 98), (128, 99), (128, 100), (128, 101), (128, 102), (128, 104), (128, 118), (128, 120), (128, 121), (128, 122), (128, 123), (128, 124), (128, 125), (128, 126), (128, 127),
(128, 128), (128, 129), (128, 130), (128, 131), (128, 132), (128, 133), (128, 134), (128, 135), (128, 136), (128, 137), (128, 138), (128, 139), (128, 140), (128, 141), (128, 142), (128, 144), (129, 92), (129, 94), (129, 95), (129, 96), (129, 97), (129, 98), (129, 99), (129, 100), (129, 101), (129, 102), (129, 104), (129, 118), (129, 120), (129, 121), (129, 122), (129, 123), (129, 124), (129, 125), (129, 126), (129, 127), (129, 128), (129, 129), (129, 130), (129, 131), (129, 132), (129, 133), (129, 134), (129, 135), (129, 136), (129, 137), (129, 138), (129, 139), (129, 140), (129, 141), (129, 142), (129, 143), (129, 145), (130, 92), (130, 94), (130, 95), (130, 96), (130, 97), (130, 98), (130, 99), (130, 100), (130, 101), (130, 102), (130, 104), (130, 119), (130, 121), (130, 122), (130, 123), (130, 124), (130, 125), (130, 126), (130, 127),
(130, 128), (130, 129), (130, 130), (130, 131), (130, 132), (130, 133), (130, 134), (130, 135), (130, 136), (130, 137), (130, 138), (130, 139), (130, 140), (130, 141), (130, 145), (131, 92), (131, 94), (131, 95), (131, 96), (131, 97), (131, 98), (131, 99), (131, 100), (131, 101), (131, 103), (131, 119), (131, 121), (131, 122), (131, 123), (131, 124), (131, 125), (131, 126), (131, 127), (131, 128), (131, 129), (131, 130), (131, 131), (131, 132), (131, 133), (131, 134), (131, 135), (131, 136), (131, 137), (131, 138), (131, 139), (131, 142), (131, 143), (132, 92), (132, 94), (132, 95), (132, 96), (132, 97), (132, 98), (132, 99), (132, 100), (132, 101), (132, 103), (132, 122), (132, 123), (132, 124), (132, 125), (132, 126), (132, 127), (132, 128), (132, 129), (132, 130), (132, 131), (132, 132), (132, 133), (132, 134), (132, 135), (132, 136),
(132, 137), (132, 140), (133, 92), (133, 94), (133, 95), (133, 96), (133, 97), (133, 98), (133, 99), (133, 100), (133, 101), (133, 102), (133, 120), (133, 124), (133, 125), (133, 126), (133, 127), (133, 128), (133, 129), (133, 130), (133, 131), (133, 132), (133, 133), (133, 134), (133, 135), (133, 136), (133, 139), (134, 92), (134, 94), (134, 95), (134, 96), (134, 97), (134, 98), (134, 99), (134, 100), (134, 102), (134, 122), (134, 127), (134, 128), (134, 129), (134, 130), (134, 131), (134, 132), (134, 133), (134, 134), (134, 137), (135, 92), (135, 94), (135, 95), (135, 96), (135, 97), (135, 98), (135, 99), (135, 100), (135, 102), (135, 124), (135, 126), (135, 135), (135, 136), (136, 92), (136, 94), (136, 95), (136, 96), (136, 97), (136, 98), (136, 99), (136, 101), (136, 127), (136, 128), (136, 129), (136, 130), (136, 131),
(136, 132), (136, 133), (137, 92), (137, 94), (137, 95), (137, 96), (137, 97), (137, 98), (137, 99), (137, 101), (138, 92), (138, 94), (138, 95), (138, 96), (138, 97), (138, 98), (138, 99), (138, 101), (139, 92), (139, 94), (139, 95), (139, 96), (139, 97), (139, 98), (139, 100), (140, 92), (140, 94), (140, 95), (140, 96), (140, 97), (140, 98), (140, 100), (141, 92), (141, 94), (141, 95), (141, 96), (141, 97), (141, 99), (142, 92), (142, 94), (142, 95), (142, 96), (142, 97), (142, 99), (143, 92), (143, 94), (143, 95), (143, 96), (143, 98), (144, 92), (144, 97), (145, 93), (145, 96), (299, 91), (300, 90), (300, 92), (300, 93), (300, 95), (301, 89), (301, 91), (301, 96), (302, 88), (302, 90), (302, 91), (302, 92), (302, 93), (302, 94), (302, 95), (302, 97), (303, 87), (303, 89), (303, 90),
(303, 91), (303, 92), (303, 93), (303, 94), (303, 95), (303, 96), (303, 98), (304, 87), (304, 89), (304, 90), (304, 91), (304, 92), (304, 93), (304, 94), (304, 95), (304, 96), (304, 98), (305, 86), (305, 88), (305, 89), (305, 90), (305, 91), (305, 92), (305, 93), (305, 94), (305, 95), (305, 96), (305, 97), (305, 99), (306, 86), (306, 88), (306, 89), (306, 90), (306, 91), (306, 92), (306, 93), (306, 94), (306, 95), (306, 96), (306, 97), (306, 99), (307, 86), (307, 88), (307, 89), (307, 90), (307, 91), (307, 92), (307, 93), (307, 94), (307, 95), (307, 96), (307, 97), (307, 99), (308, 88), (308, 90), (308, 91), (308, 92), (308, 93), (308, 94), (308, 95), (308, 96), (308, 97), (308, 98), (308, 100), (309, 88), (309, 90), (309, 91), (309, 92), (309, 93), (309, 94), (309, 95), (309, 96),
(309, 97), (309, 98), (309, 100), (310, 88), (310, 90), (310, 91), (310, 92), (310, 93), (310, 94), (310, 95), (310, 96), (310, 97), (310, 98), (310, 100), (310, 124), (310, 125), (310, 126), (310, 127), (311, 88), (311, 90), (311, 91), (311, 92), (311, 93), (311, 94), (311, 95), (311, 96), (311, 97), (311, 98), (311, 100), (311, 121), (311, 123), (311, 128), (311, 129), (311, 130), (311, 131), (311, 132), (312, 88), (312, 90), (312, 91), (312, 92), (312, 93), (312, 94), (312, 95), (312, 96), (312, 97), (312, 98), (312, 99), (312, 101), (312, 120), (312, 124), (312, 125), (312, 126), (312, 127), (312, 133), (312, 134), (312, 135), (313, 88), (313, 90), (313, 91), (313, 92), (313, 93), (313, 94), (313, 95), (313, 96), (313, 97), (313, 98), (313, 99), (313, 101), (313, 119), (313, 121), (313, 122), (313, 123),
(313, 124), (313, 125), (313, 126), (313, 127), (313, 128), (313, 129), (313, 130), (313, 131), (313, 132), (313, 136), (313, 137), (313, 138), (314, 88), (314, 90), (314, 91), (314, 92), (314, 93), (314, 94), (314, 95), (314, 96), (314, 97), (314, 98), (314, 99), (314, 101), (314, 118), (314, 120), (314, 121), (314, 122), (314, 123), (314, 124), (314, 125), (314, 126), (314, 127), (314, 128), (314, 129), (314, 130), (314, 131), (314, 132), (314, 133), (314, 134), (314, 135), (314, 140), (314, 142), (315, 87), (315, 89), (315, 90), (315, 91), (315, 92), (315, 93), (315, 94), (315, 95), (315, 96), (315, 97), (315, 98), (315, 99), (315, 101), (315, 118), (315, 120), (315, 121), (315, 122), (315, 123), (315, 124), (315, 125), (315, 126), (315, 127), (315, 128), (315, 129), (315, 130), (315, 131), (315, 132), (315, 133), (315, 134),
(315, 135), (315, 136), (315, 137), (315, 138), (315, 139), (315, 142), (316, 87), (316, 89), (316, 90), (316, 91), (316, 92), (316, 93), (316, 94), (316, 95), (316, 96), (316, 97), (316, 98), (316, 99), (316, 100), (316, 101), (316, 102), (316, 117), (316, 119), (316, 120), (316, 121), (316, 122), (316, 123), (316, 124), (316, 125), (316, 126), (316, 127), (316, 128), (316, 129), (316, 130), (316, 131), (316, 132), (316, 133), (316, 134), (316, 135), (316, 136), (316, 137), (316, 138), (316, 139), (316, 140), (316, 142), (317, 87), (317, 89), (317, 90), (317, 91), (317, 92), (317, 93), (317, 94), (317, 95), (317, 96), (317, 97), (317, 98), (317, 99), (317, 100), (317, 102), (317, 117), (317, 119), (317, 120), (317, 121), (317, 122), (317, 123), (317, 124), (317, 125), (317, 126), (317, 127), (317, 128), (317, 129), (317, 130),
(317, 131), (317, 132), (317, 133), (317, 134), (317, 135), (317, 136), (317, 137), (317, 138), (317, 139), (317, 141), (318, 86), (318, 88), (318, 89), (318, 90), (318, 91), (318, 92), (318, 93), (318, 94), (318, 95), (318, 96), (318, 97), (318, 98), (318, 99), (318, 100), (318, 102), (318, 116), (318, 118), (318, 119), (318, 120), (318, 121), (318, 122), (318, 123), (318, 124), (318, 125), (318, 126), (318, 127), (318, 128), (318, 129), (318, 130), (318, 131), (318, 132), (318, 133), (318, 134), (318, 135), (318, 136), (318, 137), (318, 138), (318, 139), (318, 141), (319, 86), (319, 88), (319, 89), (319, 90), (319, 91), (319, 92), (319, 93), (319, 94), (319, 95), (319, 96), (319, 97), (319, 98), (319, 99), (319, 100), (319, 102), (319, 116), (319, 118), (319, 119), (319, 120), (319, 121), (319, 122), (319, 123), (319, 124),
(319, 125), (319, 126), (319, 127), (319, 128), (319, 129), (319, 130), (319, 131), (319, 132), (319, 133), (319, 134), (319, 135), (319, 136), (319, 137), (319, 138), (319, 140), (320, 85), (320, 87), (320, 88), (320, 89), (320, 90), (320, 91), (320, 92), (320, 93), (320, 94), (320, 95), (320, 96), (320, 97), (320, 98), (320, 99), (320, 100), (320, 101), (320, 103), (320, 115), (320, 117), (320, 118), (320, 119), (320, 120), (320, 121), (320, 122), (320, 123), (320, 124), (320, 125), (320, 126), (320, 127), (320, 128), (320, 129), (320, 130), (320, 131), (320, 132), (320, 133), (320, 134), (320, 135), (320, 136), (320, 137), (320, 138), (320, 140), (321, 85), (321, 87), (321, 88), (321, 89), (321, 90), (321, 91), (321, 92), (321, 93), (321, 94), (321, 95), (321, 96), (321, 97), (321, 98), (321, 99), (321, 100), (321, 101),
(321, 103), (321, 115), (321, 117), (321, 118), (321, 119), (321, 120), (321, 121), (321, 122), (321, 123), (321, 124), (321, 125), (321, 126), (321, 127), (321, 128), (321, 129), (321, 130), (321, 131), (321, 132), (321, 133), (321, 134), (321, 135), (321, 136), (321, 137), (321, 139), (322, 85), (322, 87), (322, 88), (322, 89), (322, 90), (322, 91), (322, 92), (322, 93), (322, 94), (322, 95), (322, 96), (322, 97), (322, 98), (322, 99), (322, 100), (322, 101), (322, 102), (322, 104), (322, 114), (322, 116), (322, 117), (322, 118), (322, 119), (322, 120), (322, 121), (322, 122), (322, 123), (322, 124), (322, 125), (322, 126), (322, 127), (322, 128), (322, 129), (322, 130), (322, 131), (322, 132), (322, 133), (322, 134), (322, 135), (322, 136), (322, 137), (322, 139), (323, 84), (323, 86), (323, 87), (323, 88), (323, 89), (323, 90),
(323, 91), (323, 92), (323, 93), (323, 94), (323, 95), (323, 96), (323, 97), (323, 98), (323, 99), (323, 100), (323, 101), (323, 102), (323, 104), (323, 114), (323, 116), (323, 117), (323, 118), (323, 119), (323, 120), (323, 121), (323, 122), (323, 123), (323, 124), (323, 125), (323, 126), (323, 127), (323, 128), (323, 129), (323, 130), (323, 131), (323, 132), (323, 133), (323, 134), (323, 135), (323, 136), (323, 137), (323, 139), (324, 84), (324, 86), (324, 87), (324, 88), (324, 89), (324, 90), (324, 91), (324, 92), (324, 93), (324, 94), (324, 95), (324, 96), (324, 97), (324, 98), (324, 99), (324, 100), (324, 101), (324, 102), (324, 103), (324, 105), (324, 113), (324, 115), (324, 116), (324, 117), (324, 118), (324, 119), (324, 120), (324, 121), (324, 122), (324, 123), (324, 124), (324, 125), (324, 126), (324, 127), (324, 128),
(324, 129), (324, 130), (324, 131), (324, 132), (324, 133), (324, 134), (324, 135), (324, 136), (324, 138), (325, 83), (325, 84), (325, 85), (325, 86), (325, 87), (325, 88), (325, 89), (325, 90), (325, 91), (325, 92), (325, 93), (325, 94), (325, 95), (325, 96), (325, 97), (325, 98), (325, 99), (325, 100), (325, 101), (325, 102), (325, 103), (325, 104), (325, 106), (325, 112), (325, 114), (325, 115), (325, 116), (325, 117), (325, 118), (325, 119), (325, 120), (325, 121), (325, 122), (325, 123), (325, 124), (325, 125), (325, 126), (325, 127), (325, 128), (325, 129), (325, 130), (325, 131), (325, 132), (325, 133), (325, 134), (325, 135), (325, 136), (325, 138), (326, 83), (326, 85), (326, 86), (326, 87), (326, 88), (326, 89), (326, 90), (326, 91), (326, 92), (326, 93), (326, 94), (326, 95), (326, 96), (326, 97), (326, 98),
(326, 99), (326, 100), (326, 101), (326, 102), (326, 103), (326, 104), (326, 105), (326, 107), (326, 108), (326, 111), (326, 113), (326, 114), (326, 115), (326, 116), (326, 117), (326, 118), (326, 119), (326, 120), (326, 121), (326, 122), (326, 123), (326, 124), (326, 125), (326, 126), (326, 127), (326, 128), (326, 129), (326, 130), (326, 131), (326, 132), (326, 133), (326, 134), (326, 135), (326, 137), (327, 83), (327, 85), (327, 86), (327, 87), (327, 88), (327, 89), (327, 90), (327, 91), (327, 92), (327, 93), (327, 94), (327, 95), (327, 96), (327, 97), (327, 98), (327, 99), (327, 100), (327, 101), (327, 102), (327, 103), (327, 104), (327, 105), (327, 106), (327, 109), (327, 112), (327, 113), (327, 114), (327, 115), (327, 116), (327, 117), (327, 118), (327, 119), (327, 120), (327, 121), (327, 122), (327, 123), (327, 124), (327, 125),
(327, 126), (327, 127), (327, 128), (327, 129), (327, 130), (327, 131), (327, 132), (327, 133), (327, 134), (327, 135), (327, 137), (328, 83), (328, 85), (328, 86), (328, 87), (328, 88), (328, 89), (328, 90), (328, 91), (328, 92), (328, 93), (328, 94), (328, 95), (328, 96), (328, 97), (328, 98), (328, 99), (328, 100), (328, 101), (328, 102), (328, 103), (328, 104), (328, 105), (328, 106), (328, 107), (328, 108), (328, 110), (328, 111), (328, 112), (328, 113), (328, 114), (328, 115), (328, 116), (328, 117), (328, 118), (328, 119), (328, 120), (328, 121), (328, 122), (328, 123), (328, 124), (328, 125), (328, 126), (328, 127), (328, 128), (328, 129), (328, 130), (328, 131), (328, 132), (328, 133), (328, 134), (328, 136), (329, 83), (329, 85), (329, 86), (329, 87), (329, 88), (329, 89), (329, 90), (329, 91), (329, 92), (329, 93),
(329, 94), (329, 95), (329, 96), (329, 97), (329, 98), (329, 99), (329, 100), (329, 101), (329, 102), (329, 103), (329, 104), (329, 105), (329, 106), (329, 107), (329, 108), (329, 109), (329, 110), (329, 111), (329, 112), (329, 113), (329, 114), (329, 115), (329, 116), (329, 117), (329, 118), (329, 119), (329, 120), (329, 121), (329, 122), (329, 123), (329, 124), (329, 125), (329, 126), (329, 127), (329, 128), (329, 129), (329, 130), (329, 131), (329, 132), (329, 133), (329, 135), (330, 83), (330, 85), (330, 86), (330, 87), (330, 88), (330, 89), (330, 90), (330, 91), (330, 92), (330, 93), (330, 94), (330, 95), (330, 96), (330, 97), (330, 98), (330, 99), (330, 100), (330, 101), (330, 102), (330, 103), (330, 104), (330, 105), (330, 106), (330, 107), (330, 108), (330, 109), (330, 110), (330, 111), (330, 112), (330, 113), (330, 114),
(330, 115), (330, 116), (330, 117), (330, 118), (330, 119), (330, 120), (330, 121), (330, 122), (330, 123), (330, 124), (330, 125), (330, 126), (330, 127), (330, 128), (330, 129), (330, 130), (330, 131), (330, 132), (330, 134), (331, 83), (331, 85), (331, 86), (331, 87), (331, 88), (331, 89), (331, 90), (331, 91), (331, 92), (331, 93), (331, 94), (331, 95), (331, 96), (331, 97), (331, 98), (331, 99), (331, 100), (331, 101), (331, 102), (331, 103), (331, 104), (331, 105), (331, 106), (331, 107), (331, 108), (331, 109), (331, 110), (331, 111), (331, 112), (331, 113), (331, 114), (331, 115), (331, 116), (331, 117), (331, 118), (331, 119), (331, 120), (331, 121), (331, 122), (331, 123), (331, 124), (331, 125), (331, 126), (331, 127), (331, 128), (331, 129), (331, 130), (331, 131), (331, 133), (332, 83), (332, 85), (332, 86), (332, 87),
(332, 88), (332, 89), (332, 90), (332, 91), (332, 92), (332, 93), (332, 94), (332, 95), (332, 96), (332, 97), (332, 98), (332, 99), (332, 100), (332, 101), (332, 102), (332, 103), (332, 104), (332, 105), (332, 106), (332, 107), (332, 108), (332, 109), (332, 110), (332, 111), (332, 112), (332, 113), (332, 114), (332, 115), (332, 116), (332, 117), (332, 118), (332, 119), (332, 120), (332, 121), (332, 122), (332, 123), (332, 124), (332, 125), (332, 126), (332, 127), (332, 128), (332, 129), (332, 130), (332, 131), (333, 84), (333, 86), (333, 87), (333, 88), (333, 89), (333, 90), (333, 91), (333, 92), (333, 93), (333, 94), (333, 95), (333, 96), (333, 97), (333, 98), (333, 99), (333, 100), (333, 101), (333, 102), (333, 103), (333, 104), (333, 105), (333, 106), (333, 107), (333, 108), (333, 109), (333, 110), (333, 111), (333, 112),
(333, 113), (333, 114), (333, 115), (333, 116), (333, 117), (333, 118), (333, 119), (333, 120), (333, 121), (333, 122), (333, 123), (333, 124), (333, 125), (333, 126), (333, 127), (333, 128), (333, 129), (333, 130), (333, 132), (334, 84), (334, 86), (334, 87), (334, 88), (334, 89), (334, 90), (334, 91), (334, 92), (334, 93), (334, 94), (334, 95), (334, 96), (334, 97), (334, 98), (334, 99), (334, 100), (334, 101), (334, 102), (334, 103), (334, 104), (334, 105), (334, 106), (334, 107), (334, 108), (334, 109), (334, 110), (334, 111), (334, 112), (334, 113), (334, 114), (334, 115), (334, 116), (334, 117), (334, 118), (334, 119), (334, 120), (334, 121), (334, 122), (334, 123), (334, 124), (334, 125), (334, 126), (334, 127), (334, 128), (334, 129), (334, 131), (335, 85), (335, 87), (335, 88), (335, 89), (335, 90), (335, 91), (335, 92),
(335, 93), (335, 94), (335, 95), (335, 96), (335, 97), (335, 98), (335, 99), (335, 100), (335, 101), (335, 102), (335, 103), (335, 104), (335, 105), (335, 106), (335, 107), (335, 108), (335, 109), (335, 110), (335, 111), (335, 112), (335, 113), (335, 114), (335, 115), (335, 116), (335, 117), (335, 118), (335, 119), (335, 120), (335, 121), (335, 122), (335, 123), (335, 124), (335, 125), (335, 126), (335, 127), (335, 128), (335, 129), (335, 131), (336, 86), (336, 88), (336, 89), (336, 90), (336, 91), (336, 92), (336, 93), (336, 94), (336, 95), (336, 96), (336, 97), (336, 98), (336, 99), (336, 100), (336, 101), (336, 102), (336, 103), (336, 104), (336, 105), (336, 106), (336, 107), (336, 108), (336, 109), (336, 110), (336, 111), (336, 112), (336, 113), (336, 114), (336, 115), (336, 116), (336, 117), (336, 118), (336, 119), (336, 120),
(336, 121), (336, 122), (336, 123), (336, 124), (336, 125), (336, 126), (336, 127), (336, 128), (336, 130), (337, 87), (337, 89), (337, 90), (337, 91), (337, 92), (337, 93), (337, 94), (337, 95), (337, 96), (337, 97), (337, 98), (337, 99), (337, 100), (337, 101), (337, 102), (337, 103), (337, 104), (337, 105), (337, 106), (337, 107), (337, 108), (337, 109), (337, 110), (337, 111), (337, 112), (337, 113), (337, 114), (337, 115), (337, 116), (337, 117), (337, 118), (337, 119), (337, 120), (337, 121), (337, 122), (337, 123), (337, 124), (337, 125), (337, 126), (337, 127), (337, 128), (337, 129), (338, 88), (338, 90), (338, 91), (338, 92), (338, 93), (338, 94), (338, 95), (338, 96), (338, 97), (338, 98), (338, 99), (338, 100), (338, 101), (338, 102), (338, 103), (338, 104), (338, 105), (338, 106), (338, 107), (338, 108), (338, 109),
(338, 110), (338, 111), (338, 112), (338, 113), (338, 114), (338, 115), (338, 116), (338, 117), (338, 118), (338, 119), (338, 120), (338, 121), (338, 122), (338, 123), (338, 124), (338, 125), (338, 126), (338, 127), (338, 129), (339, 89), (339, 92), (339, 93), (339, 94), (339, 95), (339, 96), (339, 97), (339, 98), (339, 99), (339, 100), (339, 101), (339, 102), (339, 103), (339, 104), (339, 105), (339, 106), (339, 107), (339, 108), (339, 109), (339, 110), (339, 111), (339, 112), (339, 113), (339, 114), (339, 115), (339, 116), (339, 117), (339, 118), (339, 119), (339, 120), (339, 121), (339, 122), (339, 123), (339, 124), (339, 125), (339, 126), (339, 127), (339, 128), (340, 90), (340, 93), (340, 94), (340, 95), (340, 96), (340, 97), (340, 98), (340, 99), (340, 100), (340, 101), (340, 102), (340, 103), (340, 104), (340, 105), (340, 106),
(340, 107), (340, 108), (340, 109), (340, 110), (340, 111), (340, 112), (340, 113), (340, 114), (340, 115), (340, 116), (340, 117), (340, 118), (340, 119), (340, 120), (340, 121), (340, 122), (340, 123), (340, 124), (340, 125), (340, 126), (340, 128), (341, 91), (341, 92), (341, 95), (341, 96), (341, 97), (341, 98), (341, 99), (341, 100), (341, 101), (341, 102), (341, 103), (341, 104), (341, 105), (341, 106), (341, 107), (341, 108), (341, 109), (341, 110), (341, 111), (341, 112), (341, 113), (341, 114), (341, 115), (341, 116), (341, 117), (341, 118), (341, 119), (341, 120), (341, 121), (341, 122), (341, 123), (341, 124), (341, 125), (341, 128), (342, 93), (342, 96), (342, 97), (342, 98), (342, 99), (342, 100), (342, 101), (342, 102), (342, 103), (342, 104), (342, 105), (342, 106), (342, 107), (342, 108), (342, 109), (342, 110), (342, 111),
(342, 112), (342, 113), (342, 114), (342, 115), (342, 116), (342, 117), (342, 118), (342, 119), (342, 120), (342, 121), (342, 122), (342, 123), (342, 127), (343, 95), (343, 98), (343, 99), (343, 100), (343, 101), (343, 102), (343, 103), (343, 104), (343, 105), (343, 106), (343, 107), (343, 108), (343, 109), (343, 110), (343, 111), (343, 112), (343, 113), (343, 114), (343, 115), (343, 116), (343, 117), (343, 118), (343, 119), (343, 120), (343, 121), (343, 124), (343, 125), (344, 96), (344, 97), (344, 112), (344, 113), (344, 114), (344, 115), (344, 116), (344, 117), (344, 118), (344, 119), (344, 122), (345, 98), (345, 100), (345, 101), (345, 102), (345, 103), (345, 104), (345, 105), (345, 106), (345, 107), (345, 108), (345, 109), (345, 110), (345, 111), (345, 113), (345, 114), (345, 115), (345, 116), (345, 117), (345, 118), (345, 121), (346, 112),
(346, 115), (346, 116), (346, 119), (347, 113), (347, 118), (348, 114), (348, 116), )
coordinates_CC3301 = ((175, 95),
(175, 96), (176, 95), (176, 96), (177, 96), )
coordinates_66FF33 = ((144, 151),
(145, 150), (145, 153), (146, 150), (146, 155), (147, 149), (147, 151), (147, 152), (147, 153), (147, 156), (148, 149), (148, 151), (148, 152), (148, 153), (148, 154), (148, 157), (149, 148), (149, 150), (149, 151), (149, 152), (149, 153), (149, 154), (149, 155), (149, 157), (150, 148), (150, 150), (150, 151), (150, 152), (150, 153), (150, 154), (150, 155), (150, 156), (150, 158), (151, 149), (151, 151), (151, 152), (151, 153), (151, 154), (151, 155), (151, 156), (151, 157), (151, 159), (152, 150), (152, 152), (152, 153), (152, 154), (152, 155), (152, 156), (152, 157), (152, 159), (153, 151), (153, 153), (153, 154), (153, 155), (153, 156), (153, 157), (153, 158), (153, 160), (154, 151), (154, 153), (154, 154), (154, 155), (154, 156), (154, 157), (154, 158), (154, 160), (155, 152), (155, 154), (155, 155), (155, 156), (155, 157), (155, 158), (155, 159),
(155, 161), (156, 153), (156, 155), (156, 156), (156, 157), (156, 158), (156, 159), (156, 161), (157, 154), (157, 156), (157, 157), (157, 158), (157, 159), (157, 160), (157, 162), (158, 155), (158, 157), (158, 158), (158, 159), (158, 160), (158, 162), (159, 156), (159, 158), (159, 159), (159, 160), (159, 161), (159, 163), (160, 156), (160, 159), (160, 160), (160, 161), (160, 163), (161, 157), (161, 159), (161, 160), (161, 161), (161, 162), (161, 164), (162, 158), (162, 160), (162, 161), (162, 162), (162, 164), (163, 159), (163, 161), (163, 162), (163, 163), (163, 165), (164, 160), (164, 162), (164, 163), (164, 165), (165, 161), (165, 163), (165, 164), (165, 166), (166, 162), (166, 164), (166, 166), (167, 163), (167, 165), (167, 167), (168, 164), (168, 167), (169, 165), (169, 168), (170, 166), (170, 168), (171, 167), (171, 169), (172, 169), (173, 170),
(174, 170), (175, 171), (176, 172), (177, 173), (178, 174), (179, 175), (180, 176), (180, 177), (181, 177), (269, 173), (270, 172), (271, 172), (272, 171), (273, 168), (273, 171), (274, 167), (274, 170), (275, 166), (275, 170), (276, 165), (276, 169), (277, 164), (277, 167), (277, 169), (278, 163), (278, 166), (278, 168), (279, 162), (279, 164), (279, 165), (279, 166), (279, 168), (280, 161), (280, 163), (280, 164), (280, 165), (280, 167), (281, 160), (281, 162), (281, 163), (281, 164), (281, 166), (282, 159), (282, 161), (282, 162), (282, 163), (282, 164), (282, 166), (283, 158), (283, 160), (283, 161), (283, 162), (283, 163), (283, 165), (284, 157), (284, 159), (284, 160), (284, 161), (284, 162), (284, 163), (284, 165), (285, 156), (285, 158), (285, 159), (285, 160), (285, 161), (285, 162), (285, 164), (286, 155), (286, 157), (286, 158), (286, 159),
(286, 160), (286, 161), (286, 163), (287, 154), (287, 156), (287, 157), (287, 158), (287, 159), (287, 160), (287, 161), (287, 163), (288, 153), (288, 155), (288, 156), (288, 157), (288, 158), (288, 159), (288, 160), (288, 162), (289, 152), (289, 154), (289, 155), (289, 156), (289, 157), (289, 158), (289, 159), (289, 161), (290, 151), (290, 153), (290, 154), (290, 155), (290, 156), (290, 157), (290, 158), (290, 159), (290, 161), (291, 150), (291, 152), (291, 153), (291, 154), (291, 155), (291, 156), (291, 157), (291, 158), (291, 160), (292, 149), (292, 151), (292, 152), (292, 153), (292, 154), (292, 155), (292, 156), (292, 157), (292, 159), (293, 148), (293, 150), (293, 151), (293, 152), (293, 153), (293, 154), (293, 155), (293, 156), (293, 157), (293, 159), (294, 148), (294, 150), (294, 151), (294, 152), (294, 153), (294, 154), (294, 155), (294, 156),
(294, 158), (295, 148), (295, 150), (295, 151), (295, 152), (295, 153), (295, 154), (295, 155), (295, 157), (296, 149), (296, 151), (296, 152), (296, 153), (296, 154), (296, 155), (296, 157), (297, 149), (297, 151), (297, 152), (297, 153), (297, 154), (297, 156), (298, 149), (298, 151), (298, 152), (298, 153), (298, 155), (299, 150), (299, 152), (299, 154), (300, 150), (300, 152), (300, 154), (301, 150), (301, 153), (302, 151), (302, 152), (303, 151), )
coordinates_0000CD = ((148, 133),
(148, 135), (148, 136), (148, 138), (149, 132), (149, 139), (149, 140), (149, 141), (150, 130), (150, 133), (150, 134), (150, 135), (150, 136), (150, 137), (150, 138), (150, 142), (150, 143), (150, 144), (150, 146), (151, 129), (151, 132), (151, 133), (151, 134), (151, 135), (151, 136), (151, 137), (151, 138), (151, 139), (151, 140), (151, 141), (151, 147), (152, 129), (152, 131), (152, 132), (152, 133), (152, 134), (152, 135), (152, 136), (152, 137), (152, 138), (152, 139), (152, 140), (152, 141), (152, 142), (152, 143), (152, 144), (152, 145), (152, 147), (153, 128), (153, 130), (153, 131), (153, 132), (153, 133), (153, 134), (153, 135), (153, 136), (153, 137), (153, 138), (153, 139), (153, 140), (153, 141), (153, 142), (153, 143), (153, 144), (153, 145), (153, 146), (153, 148), (154, 128), (154, 130), (154, 131), (154, 132), (154, 133), (154, 134),
(154, 135), (154, 136), (154, 137), (154, 138), (154, 139), (154, 140), (154, 141), (154, 142), (154, 143), (154, 144), (154, 145), (154, 146), (154, 147), (154, 149), (155, 127), (155, 129), (155, 130), (155, 131), (155, 132), (155, 133), (155, 134), (155, 135), (155, 136), (155, 137), (155, 138), (155, 139), (155, 140), (155, 141), (155, 142), (155, 143), (155, 144), (155, 145), (155, 146), (155, 147), (155, 148), (155, 150), (156, 134), (156, 135), (156, 136), (156, 137), (156, 138), (156, 139), (156, 140), (156, 141), (156, 142), (156, 143), (156, 144), (156, 145), (156, 146), (156, 147), (156, 148), (156, 149), (156, 151), (157, 127), (157, 129), (157, 130), (157, 131), (157, 132), (157, 133), (157, 135), (157, 136), (157, 137), (157, 138), (157, 139), (157, 140), (157, 141), (157, 142), (157, 143), (157, 144), (157, 145), (157, 146), (157, 147),
(157, 148), (157, 149), (157, 151), (158, 134), (158, 136), (158, 137), (158, 138), (158, 139), (158, 140), (158, 141), (158, 142), (158, 143), (158, 144), (158, 145), (158, 146), (158, 147), (158, 148), (158, 149), (158, 150), (158, 152), (159, 135), (159, 141), (159, 142), (159, 143), (159, 144), (159, 145), (159, 146), (159, 147), (159, 148), (159, 149), (159, 150), (159, 151), (159, 153), (160, 135), (160, 136), (160, 137), (160, 138), (160, 139), (160, 142), (160, 143), (160, 144), (160, 145), (160, 146), (160, 147), (160, 148), (160, 149), (160, 150), (160, 151), (160, 152), (160, 154), (161, 141), (161, 143), (161, 144), (161, 145), (161, 146), (161, 147), (161, 148), (161, 149), (161, 150), (161, 151), (161, 152), (161, 153), (161, 155), (162, 144), (162, 145), (162, 146), (162, 147), (162, 148), (162, 149), (162, 150), (162, 151), (162, 152),
(162, 153), (162, 154), (162, 156), (163, 142), (163, 144), (163, 145), (163, 146), (163, 147), (163, 148), (163, 149), (163, 150), (163, 151), (163, 152), (163, 153), (163, 154), (163, 155), (163, 157), (164, 143), (164, 145), (164, 146), (164, 147), (164, 148), (164, 149), (164, 150), (164, 151), (164, 152), (164, 153), (164, 154), (164, 155), (164, 158), (165, 144), (165, 146), (165, 147), (165, 148), (165, 149), (165, 150), (165, 151), (165, 152), (165, 153), (165, 154), (165, 155), (165, 156), (166, 144), (166, 146), (166, 147), (166, 148), (166, 149), (166, 150), (166, 151), (166, 152), (166, 153), (166, 154), (166, 155), (166, 156), (166, 157), (166, 159), (167, 145), (167, 147), (167, 148), (167, 149), (167, 150), (167, 151), (167, 152), (167, 153), (167, 154), (167, 155), (167, 156), (167, 157), (167, 158), (167, 160), (168, 145), (168, 147),
(168, 148), (168, 149), (168, 150), (168, 151), (168, 152), (168, 153), (168, 154), (168, 155), (168, 156), (168, 157), (168, 158), (168, 159), (168, 161), (169, 146), (169, 148), (169, 149), (169, 150), (169, 151), (169, 152), (169, 153), (169, 154), (169, 155), (169, 156), (169, 157), (169, 158), (169, 159), (169, 160), (169, 162), (170, 146), (170, 148), (170, 149), (170, 150), (170, 151), (170, 152), (170, 153), (170, 154), (170, 155), (170, 156), (170, 157), (170, 158), (170, 159), (170, 160), (170, 161), (170, 163), (171, 147), (171, 149), (171, 150), (171, 151), (171, 152), (171, 153), (171, 154), (171, 155), (171, 156), (171, 157), (171, 158), (171, 159), (171, 160), (171, 161), (171, 162), (171, 164), (172, 147), (172, 149), (172, 150), (172, 151), (172, 152), (172, 153), (172, 154), (172, 155), (172, 156), (172, 157), (172, 158), (172, 159),
(172, 160), (172, 161), (172, 162), (172, 163), (172, 165), (173, 148), (173, 150), (173, 151), (173, 152), (173, 153), (173, 154), (173, 155), (173, 156), (173, 157), (173, 158), (173, 159), (173, 160), (173, 161), (173, 162), (173, 163), (173, 164), (173, 166), (174, 148), (174, 150), (174, 151), (174, 152), (174, 153), (174, 154), (174, 155), (174, 156), (174, 157), (174, 158), (174, 159), (174, 160), (174, 161), (174, 162), (174, 163), (174, 164), (174, 165), (174, 167), (175, 149), (175, 151), (175, 152), (175, 153), (175, 154), (175, 155), (175, 156), (175, 157), (175, 158), (175, 159), (175, 160), (175, 161), (175, 162), (175, 163), (175, 164), (175, 165), (175, 166), (175, 168), (176, 149), (176, 151), (176, 152), (176, 153), (176, 154), (176, 155), (176, 156), (176, 157), (176, 158), (176, 159), (176, 160), (176, 161), (176, 162), (176, 163),
(176, 164), (176, 165), (176, 166), (176, 167), (176, 169), (177, 150), (177, 152), (177, 153), (177, 154), (177, 155), (177, 156), (177, 157), (177, 158), (177, 159), (177, 160), (177, 161), (177, 162), (177, 163), (177, 164), (177, 165), (177, 166), (177, 167), (177, 168), (177, 170), (178, 150), (178, 152), (178, 153), (178, 154), (178, 155), (178, 156), (178, 157), (178, 158), (178, 159), (178, 160), (178, 161), (178, 162), (178, 163), (178, 164), (178, 165), (178, 166), (178, 167), (178, 168), (178, 169), (178, 171), (179, 152), (179, 153), (179, 154), (179, 155), (179, 156), (179, 157), (179, 158), (179, 159), (179, 160), (179, 161), (179, 162), (179, 163), (179, 164), (179, 165), (179, 166), (179, 167), (179, 168), (179, 169), (179, 170), (179, 172), (180, 151), (180, 153), (180, 154), (180, 155), (180, 156), (180, 157), (180, 158), (180, 159),
(180, 160), (180, 161), (180, 162), (180, 163), (180, 164), (180, 165), (180, 166), (180, 167), (180, 168), (180, 169), (180, 170), (180, 171), (181, 152), (181, 154), (181, 155), (181, 156), (181, 157), (181, 158), (181, 159), (181, 160), (181, 161), (181, 162), (181, 163), (181, 164), (181, 165), (181, 166), (181, 167), (181, 168), (181, 169), (181, 170), (181, 171), (181, 172), (181, 175), (182, 153), (182, 155), (182, 156), (182, 157), (182, 158), (182, 159), (182, 160), (182, 161), (182, 162), (182, 163), (182, 164), (182, 165), (182, 166), (182, 167), (182, 168), (182, 169), (182, 170), (182, 171), (182, 172), (182, 173), (182, 176), (183, 153), (183, 155), (183, 156), (183, 157), (183, 158), (183, 159), (183, 160), (183, 161), (183, 162), (183, 163), (183, 164), (183, 165), (183, 166), (183, 167), (183, 168), (183, 169), (183, 170), (183, 171),
(183, 172), (183, 173), (183, 174), (183, 175), (183, 177), (184, 154), (184, 156), (184, 157), (184, 158), (184, 159), (184, 160), (184, 161), (184, 162), (184, 163), (184, 164), (184, 165), (184, 166), (184, 167), (184, 168), (184, 169), (184, 170), (184, 171), (184, 172), (184, 173), (184, 174), (184, 175), (184, 177), (185, 155), (185, 157), (185, 158), (185, 159), (185, 160), (185, 161), (185, 162), (185, 163), (185, 164), (185, 165), (185, 166), (185, 167), (185, 168), (185, 169), (185, 170), (185, 171), (185, 172), (185, 173), (185, 174), (185, 175), (185, 177), (186, 156), (186, 159), (186, 160), (186, 161), (186, 162), (186, 163), (186, 164), (186, 165), (186, 166), (186, 167), (186, 168), (186, 169), (186, 170), (186, 171), (186, 172), (186, 173), (186, 174), (186, 175), (186, 177), (187, 157), (187, 161), (187, 162), (187, 163), (187, 164),
(187, 165), (187, 166), (187, 167), (187, 168), (187, 169), (187, 170), (187, 171), (187, 172), (187, 173), (187, 174), (187, 175), (187, 177), (188, 159), (188, 163), (188, 164), (188, 165), (188, 166), (188, 167), (188, 168), (188, 169), (188, 170), (188, 171), (188, 172), (188, 173), (188, 174), (188, 175), (188, 176), (188, 178), (189, 161), (189, 164), (189, 165), (189, 166), (189, 167), (189, 168), (189, 169), (189, 170), (189, 171), (189, 172), (189, 173), (189, 174), (189, 175), (189, 176), (189, 178), (190, 163), (190, 166), (190, 167), (190, 168), (190, 169), (190, 170), (190, 171), (190, 172), (190, 173), (190, 174), (190, 175), (190, 176), (190, 178), (191, 164), (191, 167), (191, 168), (191, 169), (191, 170), (191, 171), (191, 172), (191, 173), (191, 174), (191, 175), (191, 176), (191, 178), (192, 166), (192, 168), (192, 169), (192, 170),
(192, 171), (192, 172), (192, 173), (192, 174), (192, 175), (192, 176), (192, 178), (193, 167), (193, 169), (193, 170), (193, 171), (193, 172), (193, 173), (193, 174), (193, 175), (193, 176), (193, 177), (193, 179), (194, 168), (194, 171), (194, 172), (194, 173), (194, 174), (194, 175), (194, 176), (194, 177), (194, 179), (195, 169), (195, 172), (195, 173), (195, 174), (195, 175), (195, 176), (195, 177), (195, 179), (196, 170), (196, 172), (196, 173), (196, 174), (196, 175), (196, 176), (196, 177), (196, 179), (197, 171), (197, 173), (197, 174), (197, 175), (197, 176), (197, 177), (197, 178), (197, 180), (198, 172), (198, 177), (198, 178), (198, 180), (199, 173), (199, 175), (199, 176), (199, 180), (200, 177), (200, 180), (201, 180), (243, 177), (243, 180), (244, 175), (244, 180), (245, 173), (245, 174), (245, 177), (245, 178), (245, 180), (246, 170),
(246, 172), (246, 175), (246, 176), (246, 177), (246, 178), (246, 180), (247, 169), (247, 173), (247, 174), (247, 175), (247, 176), (247, 177), (247, 178), (247, 180), (248, 169), (248, 171), (248, 172), (248, 173), (248, 174), (248, 175), (248, 176), (248, 177), (248, 179), (249, 169), (249, 171), (249, 172), (249, 173), (249, 174), (249, 175), (249, 176), (249, 177), (249, 179), (250, 168), (250, 170), (250, 171), (250, 172), (250, 173), (250, 174), (250, 175), (250, 176), (250, 177), (250, 179), (251, 168), (251, 170), (251, 171), (251, 172), (251, 173), (251, 174), (251, 175), (251, 176), (251, 177), (251, 179), (252, 167), (252, 169), (252, 170), (252, 171), (252, 172), (252, 173), (252, 174), (252, 175), (252, 176), (252, 177), (252, 179), (253, 166), (253, 168), (253, 169), (253, 170), (253, 171), (253, 172), (253, 173), (253, 174), (253, 175),
(253, 176), (253, 178), (254, 166), (254, 168), (254, 169), (254, 170), (254, 171), (254, 172), (254, 173), (254, 174), (254, 175), (254, 176), (254, 178), (255, 165), (255, 167), (255, 168), (255, 169), (255, 170), (255, 171), (255, 172), (255, 173), (255, 174), (255, 175), (255, 176), (255, 178), (256, 164), (256, 166), (256, 167), (256, 168), (256, 169), (256, 170), (256, 171), (256, 172), (256, 173), (256, 174), (256, 175), (256, 176), (256, 178), (257, 163), (257, 165), (257, 166), (257, 167), (257, 168), (257, 169), (257, 170), (257, 171), (257, 172), (257, 173), (257, 174), (257, 175), (257, 176), (257, 178), (258, 162), (258, 164), (258, 165), (258, 166), (258, 167), (258, 168), (258, 169), (258, 170), (258, 171), (258, 172), (258, 173), (258, 174), (258, 175), (258, 176), (258, 178), (259, 161), (259, 163), (259, 164), (259, 165), (259, 166),
(259, 167), (259, 168), (259, 169), (259, 170), (259, 171), (259, 172), (259, 173), (259, 174), (259, 175), (259, 177), (260, 159), (260, 162), (260, 163), (260, 164), (260, 165), (260, 166), (260, 167), (260, 168), (260, 169), (260, 170), (260, 171), (260, 172), (260, 173), (260, 174), (260, 175), (260, 177), (261, 157), (261, 160), (261, 161), (261, 162), (261, 163), (261, 164), (261, 165), (261, 166), (261, 167), (261, 168), (261, 169), (261, 170), (261, 171), (261, 172), (261, 173), (261, 174), (261, 175), (261, 177), (262, 156), (262, 158), (262, 159), (262, 160), (262, 161), (262, 162), (262, 163), (262, 164), (262, 165), (262, 166), (262, 167), (262, 168), (262, 169), (262, 170), (262, 171), (262, 172), (262, 173), (262, 174), (262, 175), (262, 177), (263, 155), (263, 157), (263, 158), (263, 159), (263, 160), (263, 161), (263, 162), (263, 163),
(263, 164), (263, 165), (263, 166), (263, 167), (263, 168), (263, 169), (263, 170), (263, 171), (263, 172), (263, 173), (263, 174), (263, 177), (264, 155), (264, 157), (264, 158), (264, 159), (264, 160), (264, 161), (264, 162), (264, 163), (264, 164), (264, 165), (264, 166), (264, 167), (264, 168), (264, 169), (264, 170), (264, 171), (264, 172), (264, 173), (264, 177), (265, 154), (265, 156), (265, 157), (265, 158), (265, 159), (265, 160), (265, 161), (265, 162), (265, 163), (265, 164), (265, 165), (265, 166), (265, 167), (265, 168), (265, 169), (265, 170), (265, 171), (265, 172), (265, 177), (266, 153), (266, 155), (266, 156), (266, 157), (266, 158), (266, 159), (266, 160), (266, 161), (266, 162), (266, 163), (266, 164), (266, 165), (266, 166), (266, 167), (266, 168), (266, 169), (266, 170), (266, 171), (266, 173), (267, 153), (267, 155), (267, 156),
(267, 157), (267, 158), (267, 159), (267, 160), (267, 161), (267, 162), (267, 163), (267, 164), (267, 165), (267, 166), (267, 167), (267, 168), (267, 169), (267, 170), (267, 172), (268, 152), (268, 154), (268, 155), (268, 156), (268, 157), (268, 158), (268, 159), (268, 160), (268, 161), (268, 162), (268, 163), (268, 164), (268, 165), (268, 166), (268, 167), (268, 168), (268, 169), (268, 171), (269, 152), (269, 154), (269, 155), (269, 156), (269, 157), (269, 158), (269, 159), (269, 160), (269, 161), (269, 162), (269, 163), (269, 164), (269, 165), (269, 166), (269, 167), (269, 168), (269, 170), (270, 151), (270, 153), (270, 154), (270, 155), (270, 156), (270, 157), (270, 158), (270, 159), (270, 160), (270, 161), (270, 162), (270, 163), (270, 164), (270, 165), (270, 166), (270, 167), (270, 169), (271, 150), (271, 152), (271, 153), (271, 154), (271, 155),
(271, 156), (271, 157), (271, 158), (271, 159), (271, 160), (271, 161), (271, 162), (271, 163), (271, 164), (271, 165), (271, 166), (271, 168), (272, 150), (272, 152), (272, 153), (272, 154), (272, 155), (272, 156), (272, 157), (272, 158), (272, 159), (272, 160), (272, 161), (272, 162), (272, 163), (272, 164), (272, 165), (272, 167), (273, 149), (273, 151), (273, 152), (273, 153), (273, 154), (273, 155), (273, 156), (273, 157), (273, 158), (273, 159), (273, 160), (273, 161), (273, 162), (273, 163), (273, 164), (273, 166), (274, 148), (274, 150), (274, 151), (274, 152), (274, 153), (274, 154), (274, 155), (274, 156), (274, 157), (274, 158), (274, 159), (274, 160), (274, 161), (274, 162), (274, 163), (274, 165), (275, 148), (275, 150), (275, 151), (275, 152), (275, 153), (275, 154), (275, 155), (275, 156), (275, 157), (275, 158), (275, 159), (275, 160),
(275, 161), (275, 162), (275, 164), (276, 147), (276, 149), (276, 150), (276, 151), (276, 152), (276, 153), (276, 154), (276, 155), (276, 156), (276, 157), (276, 158), (276, 159), (276, 160), (276, 161), (276, 163), (277, 146), (277, 148), (277, 149), (277, 150), (277, 151), (277, 152), (277, 153), (277, 154), (277, 155), (277, 156), (277, 157), (277, 158), (277, 159), (277, 162), (278, 145), (278, 147), (278, 148), (278, 149), (278, 150), (278, 151), (278, 152), (278, 153), (278, 154), (278, 155), (278, 156), (278, 157), (278, 158), (278, 161), (279, 145), (279, 147), (279, 148), (279, 149), (279, 150), (279, 151), (279, 152), (279, 153), (279, 154), (279, 155), (279, 156), (279, 157), (279, 160), (280, 144), (280, 146), (280, 147), (280, 148), (280, 149), (280, 150), (280, 151), (280, 152), (280, 153), (280, 154), (280, 155), (280, 156), (280, 159),
(281, 144), (281, 146), (281, 147), (281, 148), (281, 149), (281, 150), (281, 151), (281, 152), (281, 153), (281, 154), (281, 155), (281, 158), (282, 143), (282, 145), (282, 146), (282, 147), (282, 148), (282, 149), (282, 150), (282, 151), (282, 152), (282, 153), (282, 154), (282, 157), (283, 141), (283, 144), (283, 145), (283, 146), (283, 147), (283, 148), (283, 149), (283, 150), (283, 151), (283, 152), (283, 153), (284, 136), (284, 138), (284, 139), (284, 140), (284, 143), (284, 144), (284, 145), (284, 146), (284, 147), (284, 148), (284, 149), (284, 150), (284, 151), (284, 152), (284, 154), (285, 135), (285, 141), (285, 142), (285, 143), (285, 144), (285, 145), (285, 146), (285, 147), (285, 148), (285, 149), (285, 150), (285, 151), (285, 153), (286, 135), (286, 137), (286, 138), (286, 139), (286, 140), (286, 141), (286, 142), (286, 143), (286, 144),
(286, 145), (286, 146), (286, 147), (286, 148), (286, 149), (286, 150), (286, 152), (287, 128), (287, 129), (287, 130), (287, 133), (287, 135), (287, 136), (287, 137), (287, 138), (287, 139), (287, 140), (287, 141), (287, 142), (287, 143), (287, 144), (287, 145), (287, 146), (287, 147), (287, 148), (287, 149), (287, 151), (288, 126), (288, 130), (288, 131), (288, 132), (288, 135), (288, 136), (288, 137), (288, 138), (288, 139), (288, 140), (288, 141), (288, 142), (288, 143), (288, 144), (288, 145), (288, 146), (288, 147), (288, 148), (288, 150), (289, 126), (289, 128), (289, 129), (289, 130), (289, 133), (289, 134), (289, 135), (289, 136), (289, 137), (289, 138), (289, 139), (289, 140), (289, 141), (289, 142), (289, 143), (289, 144), (289, 145), (289, 146), (289, 147), (289, 149), (290, 126), (290, 134), (290, 135), (290, 136), (290, 137), (290, 138),
(290, 139), (290, 140), (290, 141), (290, 148), (291, 128), (291, 130), (291, 131), (291, 132), (291, 133), (291, 134), (291, 141), (291, 142), (291, 143), (291, 144), (291, 145), (291, 147), (292, 134), (292, 135), (292, 136), (292, 137), (292, 138), (292, 139), (292, 140), (292, 141), )
coordinates_FF3300 = ((114, 79),
(114, 80), (114, 81), (114, 82), (114, 84), (115, 80), (115, 82), (115, 83), (115, 85), (115, 86), (116, 80), (116, 82), (116, 83), (116, 84), (116, 86), (117, 80), (117, 82), (117, 83), (117, 84), (117, 85), (117, 87), (118, 80), (118, 82), (118, 83), (118, 84), (118, 85), (118, 87), (119, 80), (119, 82), (119, 83), (119, 84), (119, 85), (119, 86), (119, 87), (119, 88), (120, 80), (120, 82), (120, 83), (120, 84), (120, 85), (120, 86), (120, 88), (121, 81), (121, 83), (121, 84), (121, 85), (121, 86), (121, 88), (122, 81), (122, 83), (122, 84), (122, 85), (122, 86), (122, 88), (123, 84), (123, 85), (123, 86), (123, 87), (123, 89), (124, 82), (124, 85), (124, 86), (124, 87), (124, 89), (125, 83), (125, 87), (125, 89), (126, 84), (126, 85), (127, 87), (127, 90), (308, 85), (309, 82),
(309, 86), (310, 81), (310, 84), (310, 86), (311, 80), (311, 82), (311, 83), (311, 84), (311, 86), (312, 79), (312, 81), (312, 82), (312, 83), (312, 84), (312, 86), (313, 79), (313, 81), (313, 82), (313, 83), (313, 85), (314, 79), (314, 80), (314, 81), (314, 82), (314, 83), (314, 85), (315, 78), (315, 80), (315, 81), (315, 82), (315, 83), (315, 85), (316, 78), (316, 80), (316, 81), (316, 82), (316, 83), (316, 85), (317, 78), (317, 80), (317, 81), (317, 82), (317, 84), (318, 77), (318, 79), (318, 80), (318, 81), (318, 82), (318, 84), (319, 77), (319, 79), (319, 80), (319, 81), (319, 82), (319, 83), (319, 84), (320, 77), (320, 79), (320, 80), (320, 81), (320, 83), (321, 77), (321, 79), (321, 80), (321, 81), (321, 83), (322, 77), (322, 79), (322, 80), (322, 82), (323, 77), (323, 79),
(323, 80), (323, 82), (324, 77), (324, 79), (324, 80), (324, 82), (325, 76), (325, 78), (325, 79), (325, 81), (326, 76), (326, 81), (327, 76), (327, 78), (327, 79), (327, 80), (327, 81), )
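# Usage sketch: each ``coordinates_<HEX>`` tuple above is a set of
# (row, column) pixel positions, and the hex code in the variable name is
# presumably the colour of that region.  The helper below is a minimal,
# hypothetical illustration of how one such mask could be rasterised; the
# canvas size and the RGB reading of the variable name are assumptions, not
# something the data itself fixes.
def _paint_mask(coords, rgb, shape=(400, 200)):
    """Return an RGB canvas with the given (row, col) mask filled in."""
    import numpy as np  # local import so the sketch stays self-contained
    canvas = np.zeros(shape + (3,), dtype=np.uint8)
    rows, cols = zip(*coords)
    canvas[rows, cols] = rgb  # fancy indexing paints every listed pixel at once
    return canvas
# Example: _paint_mask(coordinates_FF3300, (0xFF, 0x33, 0x00))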
coordinates_CCCC00 = ((201, 182),
(201, 183), (202, 167), (202, 182), (202, 184), (203, 161), (203, 162), (203, 163), (203, 164), (203, 165), (203, 166), (203, 168), (203, 182), (204, 157), (204, 159), (204, 160), (204, 169), (204, 181), (204, 183), (204, 184), (204, 186), (205, 157), (205, 161), (205, 162), (205, 163), (205, 164), (205, 165), (205, 166), (205, 167), (205, 168), (205, 170), (205, 180), (205, 182), (205, 183), (205, 184), (205, 185), (205, 187), (206, 157), (206, 159), (206, 160), (206, 161), (206, 162), (206, 163), (206, 164), (206, 165), (206, 166), (206, 167), (206, 168), (206, 171), (206, 179), (206, 182), (206, 183), (206, 184), (206, 185), (206, 187), (207, 157), (207, 159), (207, 160), (207, 161), (207, 162), (207, 163), (207, 164), (207, 165), (207, 166), (207, 167), (207, 168), (207, 169), (207, 170), (207, 172), (207, 178), (207, 180), (207, 181), (207, 182),
(207, 183), (207, 184), (207, 185), (207, 186), (207, 188), (208, 157), (208, 159), (208, 160), (208, 161), (208, 162), (208, 163), (208, 164), (208, 165), (208, 166), (208, 167), (208, 168), (208, 169), (208, 170), (208, 171), (208, 173), (208, 177), (208, 179), (208, 180), (208, 181), (208, 182), (208, 183), (208, 184), (208, 185), (208, 186), (208, 188), (209, 157), (209, 159), (209, 160), (209, 161), (209, 162), (209, 163), (209, 164), (209, 165), (209, 166), (209, 167), (209, 168), (209, 169), (209, 170), (209, 171), (209, 172), (209, 178), (209, 179), (209, 180), (209, 181), (209, 182), (209, 183), (209, 184), (209, 185), (209, 186), (209, 187), (209, 189), (210, 157), (210, 159), (210, 160), (210, 161), (210, 162), (210, 163), (210, 164), (210, 165), (210, 166), (210, 167), (210, 168), (210, 169), (210, 170), (210, 171), (210, 172), (210, 173),
(210, 177), (210, 178), (210, 179), (210, 180), (210, 181), (210, 182), (210, 183), (210, 184), (210, 185), (210, 186), (210, 187), (210, 189), (211, 157), (211, 159), (211, 160), (211, 161), (211, 162), (211, 163), (211, 164), (211, 165), (211, 166), (211, 167), (211, 168), (211, 169), (211, 170), (211, 171), (211, 172), (211, 173), (211, 174), (211, 176), (211, 177), (211, 178), (211, 179), (211, 180), (211, 181), (211, 182), (211, 183), (211, 184), (211, 185), (211, 186), (211, 187), (211, 189), (212, 157), (212, 159), (212, 160), (212, 161), (212, 162), (212, 163), (212, 164), (212, 165), (212, 166), (212, 167), (212, 168), (212, 169), (212, 170), (212, 171), (212, 172), (212, 173), (212, 174), (212, 175), (212, 176), (212, 177), (212, 178), (212, 179), (212, 180), (212, 181), (212, 182), (212, 183), (212, 184), (212, 185), (212, 186), (212, 187),
(212, 189), (213, 157), (213, 159), (213, 160), (213, 161), (213, 162), (213, 163), (213, 164), (213, 165), (213, 166), (213, 167), (213, 168), (213, 169), (213, 170), (213, 171), (213, 172), (213, 173), (213, 174), (213, 175), (213, 176), (213, 177), (213, 178), (213, 179), (213, 180), (213, 181), (213, 182), (213, 183), (213, 184), (213, 185), (213, 186), (213, 187), (213, 188), (213, 190), (214, 157), (214, 159), (214, 160), (214, 161), (214, 162), (214, 163), (214, 164), (214, 165), (214, 166), (214, 167), (214, 168), (214, 169), (214, 170), (214, 171), (214, 172), (214, 173), (214, 174), (214, 175), (214, 176), (214, 177), (214, 178), (214, 179), (214, 180), (214, 181), (214, 182), (214, 183), (214, 184), (214, 185), (214, 186), (214, 187), (214, 188), (214, 190), (215, 157), (215, 159), (215, 160), (215, 161), (215, 162), (215, 163), (215, 164),
(215, 165), (215, 166), (215, 167), (215, 168), (215, 169), (215, 170), (215, 171), (215, 172), (215, 173), (215, 174), (215, 175), (215, 176), (215, 177), (215, 178), (215, 179), (215, 180), (215, 181), (215, 182), (215, 183), (215, 184), (215, 185), (215, 186), (215, 187), (215, 188), (215, 190), (216, 157), (216, 159), (216, 160), (216, 161), (216, 162), (216, 163), (216, 164), (216, 165), (216, 166), (216, 167), (216, 168), (216, 169), (216, 170), (216, 171), (216, 172), (216, 173), (216, 174), (216, 175), (216, 176), (216, 177), (216, 178), (216, 179), (216, 180), (216, 181), (216, 182), (216, 183), (216, 184), (216, 185), (216, 186), (216, 187), (216, 188), (216, 190), (217, 157), (217, 159), (217, 160), (217, 161), (217, 162), (217, 163), (217, 164), (217, 165), (217, 166), (217, 167), (217, 168), (217, 169), (217, 170), (217, 171), (217, 172),
(217, 173), (217, 174), (217, 175), (217, 176), (217, 177), (217, 178), (217, 179), (217, 180), (217, 181), (217, 182), (217, 183), (217, 184), (217, 185), (217, 186), (217, 187), (217, 188), (217, 190), (218, 157), (218, 159), (218, 160), (218, 161), (218, 162), (218, 163), (218, 164), (218, 165), (218, 166), (218, 167), (218, 168), (218, 169), (218, 170), (218, 171), (218, 172), (218, 173), (218, 174), (218, 175), (218, 176), (218, 177), (218, 178), (218, 179), (218, 180), (218, 181), (218, 182), (218, 183), (218, 184), (218, 185), (218, 186), (218, 187), (218, 188), (218, 190), (219, 157), (219, 160), (219, 161), (219, 162), (219, 163), (219, 164), (219, 165), (219, 166), (219, 167), (219, 168), (219, 169), (219, 170), (219, 171), (219, 172), (219, 173), (219, 174), (219, 175), (219, 176), (219, 177), (219, 178), (219, 179), (219, 180), (219, 181),
(219, 182), (219, 183), (219, 184), (219, 185), (219, 186), (219, 187), (219, 188), (219, 190), (220, 158), (220, 160), (220, 161), (220, 162), (220, 163), (220, 164), (220, 165), (220, 166), (220, 167), (220, 168), (220, 169), (220, 170), (220, 171), (220, 172), (220, 173), (220, 174), (220, 175), (220, 176), (220, 177), (220, 178), (220, 179), (220, 180), (220, 181), (220, 182), (220, 183), (220, 184), (220, 185), (220, 186), (220, 187), (220, 188), (220, 190), (221, 159), (221, 161), (221, 162), (221, 163), (221, 164), (221, 165), (221, 166), (221, 167), (221, 168), (221, 169), (221, 170), (221, 171), (221, 172), (221, 173), (221, 174), (221, 175), (221, 176), (221, 177), (221, 178), (221, 179), (221, 180), (221, 181), (221, 182), (221, 183), (221, 184), (221, 185), (221, 186), (221, 187), (221, 188), (221, 190), (221, 191), (222, 160), (222, 162),
(222, 163), (222, 164), (222, 165), (222, 166), (222, 167), (222, 168), (222, 169), (222, 170), (222, 171), (222, 172), (222, 173), (222, 174), (222, 175), (222, 176), (222, 177), (222, 178), (222, 179), (222, 180), (222, 181), (222, 182), (222, 183), (222, 184), (222, 185), (222, 186), (222, 187), (222, 188), (222, 189), (222, 190), (222, 191), (223, 160), (223, 162), (223, 163), (223, 164), (223, 165), (223, 166), (223, 167), (223, 168), (223, 169), (223, 170), (223, 171), (223, 172), (223, 173), (223, 174), (223, 175), (223, 176), (223, 177), (223, 178), (223, 179), (223, 180), (223, 181), (223, 182), (223, 183), (223, 184), (223, 185), (223, 186), (223, 187), (223, 188), (223, 189), (223, 190), (223, 191), (224, 159), (224, 161), (224, 162), (224, 163), (224, 164), (224, 165), (224, 166), (224, 167), (224, 168), (224, 169), (224, 170), (224, 171),
(224, 172), (224, 173), (224, 174), (224, 175), (224, 176), (224, 177), (224, 178), (224, 179), (224, 180), (224, 181), (224, 182), (224, 183), (224, 184), (224, 185), (224, 186), (224, 187), (224, 188), (224, 190), (225, 158), (225, 160), (225, 161), (225, 162), (225, 163), (225, 164), (225, 165), (225, 166), (225, 167), (225, 168), (225, 169), (225, 170), (225, 171), (225, 172), (225, 173), (225, 174), (225, 175), (225, 176), (225, 177), (225, 178), (225, 179), (225, 180), (225, 181), (225, 182), (225, 183), (225, 184), (225, 185), (225, 186), (225, 187), (225, 188), (225, 190), (226, 157), (226, 159), (226, 160), (226, 161), (226, 162), (226, 163), (226, 164), (226, 165), (226, 166), (226, 167), (226, 168), (226, 169), (226, 170), (226, 171), (226, 172), (226, 173), (226, 174), (226, 175), (226, 176), (226, 177), (226, 178), (226, 179), (226, 180),
(226, 181), (226, 182), (226, 183), (226, 184), (226, 185), (226, 186), (226, 187), (226, 188), (226, 190), (227, 157), (227, 159), (227, 160), (227, 161), (227, 162), (227, 163), (227, 164), (227, 165), (227, 166), (227, 167), (227, 168), (227, 169), (227, 170), (227, 171), (227, 172), (227, 173), (227, 174), (227, 175), (227, 176), (227, 177), (227, 178), (227, 179), (227, 180), (227, 181), (227, 182), (227, 183), (227, 184), (227, 185), (227, 186), (227, 187), (227, 188), (227, 190), (228, 157), (228, 159), (228, 160), (228, 161), (228, 162), (228, 163), (228, 164), (228, 165), (228, 166), (228, 167), (228, 168), (228, 169), (228, 170), (228, 171), (228, 172), (228, 173), (228, 174), (228, 175), (228, 176), (228, 177), (228, 178), (228, 179), (228, 180), (228, 181), (228, 182), (228, 183), (228, 184), (228, 185), (228, 186), (228, 187), (228, 188),
(228, 190), (229, 157), (229, 159), (229, 160), (229, 161), (229, 162), (229, 163), (229, 164), (229, 165), (229, 166), (229, 167), (229, 168), (229, 169), (229, 170), (229, 171), (229, 172), (229, 173), (229, 174), (229, 175), (229, 176), (229, 177), (229, 178), (229, 179), (229, 180), (229, 181), (229, 182), (229, 183), (229, 184), (229, 185), (229, 186), (229, 187), (229, 188), (229, 190), (230, 157), (230, 159), (230, 160), (230, 161), (230, 162), (230, 163), (230, 164), (230, 165), (230, 166), (230, 167), (230, 168), (230, 169), (230, 170), (230, 171), (230, 172), (230, 173), (230, 174), (230, 175), (230, 176), (230, 177), (230, 178), (230, 179), (230, 180), (230, 181), (230, 182), (230, 183), (230, 184), (230, 185), (230, 186), (230, 187), (230, 188), (230, 190), (231, 157), (231, 159), (231, 160), (231, 161), (231, 162), (231, 163), (231, 164),
(231, 165), (231, 166), (231, 167), (231, 168), (231, 169), (231, 170), (231, 171), (231, 172), (231, 173), (231, 174), (231, 175), (231, 176), (231, 177), (231, 178), (231, 179), (231, 180), (231, 181), (231, 182), (231, 183), (231, 184), (231, 185), (231, 186), (231, 187), (231, 188), (231, 190), (232, 157), (232, 159), (232, 160), (232, 161), (232, 162), (232, 163), (232, 164), (232, 165), (232, 166), (232, 167), (232, 168), (232, 169), (232, 170), (232, 171), (232, 172), (232, 173), (232, 174), (232, 175), (232, 176), (232, 177), (232, 178), (232, 179), (232, 180), (232, 181), (232, 182), (232, 183), (232, 184), (232, 185), (232, 186), (232, 187), (232, 189), (233, 157), (233, 159), (233, 160), (233, 161), (233, 162), (233, 163), (233, 164), (233, 165), (233, 166), (233, 167), (233, 168), (233, 169), (233, 170), (233, 171), (233, 172), (233, 173),
(233, 174), (233, 175), (233, 176), (233, 177), (233, 178), (233, 179), (233, 180), (233, 181), (233, 182), (233, 183), (233, 184), (233, 185), (233, 186), (233, 187), (233, 189), (234, 157), (234, 159), (234, 160), (234, 161), (234, 162), (234, 163), (234, 164), (234, 165), (234, 166), (234, 167), (234, 168), (234, 169), (234, 170), (234, 171), (234, 172), (234, 173), (234, 174), (234, 175), (234, 176), (234, 177), (234, 178), (234, 179), (234, 180), (234, 181), (234, 182), (234, 183), (234, 184), (234, 185), (234, 186), (234, 187), (234, 189), (235, 157), (235, 159), (235, 160), (235, 161), (235, 162), (235, 163), (235, 164), (235, 165), (235, 166), (235, 167), (235, 168), (235, 169), (235, 170), (235, 177), (235, 178), (235, 179), (235, 180), (235, 181), (235, 182), (235, 183), (235, 184), (235, 185), (235, 186), (235, 188), (236, 157), (236, 159),
(236, 160), (236, 161), (236, 162), (236, 163), (236, 164), (236, 165), (236, 166), (236, 167), (236, 168), (236, 169), (236, 171), (236, 172), (236, 173), (236, 174), (236, 175), (236, 178), (236, 179), (236, 180), (236, 181), (236, 182), (236, 183), (236, 184), (236, 185), (236, 186), (236, 188), (237, 157), (237, 159), (237, 160), (237, 161), (237, 162), (237, 163), (237, 164), (237, 165), (237, 166), (237, 167), (237, 168), (237, 170), (237, 177), (237, 179), (237, 180), (237, 181), (237, 182), (237, 183), (237, 184), (237, 185), (237, 187), (238, 157), (238, 159), (238, 160), (238, 161), (238, 162), (238, 163), (238, 164), (238, 165), (238, 166), (238, 167), (238, 169), (238, 178), (238, 180), (238, 181), (238, 182), (238, 183), (238, 184), (238, 187), (239, 157), (239, 159), (239, 160), (239, 161), (239, 162), (239, 163), (239, 164), (239, 165),
(239, 166), (239, 168), (239, 179), (239, 181), (239, 182), (239, 186), (240, 157), (240, 159), (240, 160), (240, 161), (240, 162), (240, 163), (240, 164), (240, 165), (240, 166), (240, 168), (240, 179), (240, 181), (240, 182), (240, 184), (241, 157), (241, 159), (241, 160), (241, 161), (241, 162), (241, 163), (241, 164), (241, 165), (241, 166), (241, 168), (241, 179), (241, 181), (242, 157), (242, 159), (242, 160), (242, 161), (242, 162), (242, 163), (242, 164), (242, 165), (242, 166), (242, 168), (243, 157), (243, 159), (243, 160), (243, 161), (243, 162), (243, 163), (243, 164), (243, 165), (243, 166), (243, 168), (244, 157), (244, 159), (244, 160), (244, 161), (244, 162), (244, 163), (244, 164), (244, 165), (244, 166), (244, 168), )
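# Coordinate group for hex colour #CC0000 (a red). The pairs below are assumed
# to be pixel positions; that interpretation is not documented in this file.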
coordinates_CC0000 = ((201, 52),
(202, 50), (202, 52), (203, 49), (203, 52), (204, 48), (204, 52), (205, 50), (205, 52), (206, 47), (206, 49), (206, 50), (206, 52), (207, 46), (207, 48), (207, 49), (207, 51), (207, 52), (208, 46), (208, 48), (208, 49), (208, 51), (209, 45), (209, 47), (209, 48), (209, 49), (209, 51), (210, 45), (210, 47), (210, 48), (210, 49), (210, 51), (211, 44), (211, 46), (211, 47), (211, 48), (211, 49), (211, 51), (212, 44), (212, 46), (212, 47), (212, 48), (212, 50), (213, 43), (213, 45), (213, 46), (213, 47), (213, 48), (213, 50), (214, 43), (214, 45), (214, 46), (214, 47), (214, 48), (214, 50), (215, 43), (215, 45), (215, 46), (215, 47), (215, 48), (215, 50), (216, 42), (216, 44), (216, 45), (216, 46), (216, 47), (216, 48), (216, 49), (216, 50), (217, 42), (217, 45), (217, 46), (217, 47),
(217, 49), (218, 43), (218, 44), (218, 47), (218, 49), (219, 45), (219, 46), (219, 49), (220, 47), (220, 49), (228, 42), (229, 42), (229, 43), (229, 48), (229, 49), (230, 44), (230, 47), (230, 50), (231, 43), (231, 48), (231, 50), (232, 43), (232, 47), (232, 48), (232, 49), (232, 51), (233, 44), (233, 46), (233, 47), (233, 48), (233, 49), (233, 51), (234, 44), (234, 46), (234, 47), (234, 48), (234, 49), (234, 50), (234, 52), (235, 45), (235, 47), (235, 48), (235, 49), (235, 50), (235, 52), (236, 45), (236, 47), (236, 48), (236, 49), (236, 50), (236, 51), (236, 53), (237, 46), (237, 48), (237, 49), (237, 50), (237, 51), (237, 53), (238, 47), (238, 49), (238, 50), (238, 51), (238, 52), (238, 54), (239, 48), (239, 50), (239, 51), (239, 52), (239, 54), (240, 49), (240, 51), (240, 52),
(240, 53), (240, 55), (241, 50), (241, 53), (241, 55), (242, 51), (242, 55), (243, 53), (243, 55), )
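# Coordinate group for hex colour #660099 (a purple); same assumed
# pixel-position layout as the groups above.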
coordinates_660099 = ((176, 86),
(176, 88), (176, 89), (176, 90), (176, 91), (177, 86), (177, 92), (177, 94), (178, 86), (178, 88), (178, 89), (178, 90), (178, 91), (178, 95), (179, 87), (179, 89), (179, 90), (179, 91), (179, 92), (179, 93), (179, 96), (180, 88), (180, 90), (180, 91), (180, 92), (180, 93), (180, 94), (180, 96), (181, 88), (181, 90), (181, 91), (181, 92), (181, 93), (181, 94), (181, 96), (182, 89), (182, 91), (182, 92), (182, 93), (182, 94), (182, 96), (183, 89), (183, 91), (183, 92), (183, 93), (183, 94), (183, 96), (184, 89), (184, 91), (184, 92), (184, 93), (184, 94), (184, 96), (185, 88), (185, 90), (185, 91), (185, 92), (185, 93), (186, 88), (186, 90), (186, 91), (186, 92), (186, 94), (187, 88), (187, 90), (187, 91), (187, 93), (188, 88), (188, 90), (188, 91), (188, 93), (189, 88), (189, 90),
(189, 91), (189, 92), (189, 93), (190, 87), (190, 89), (190, 90), (190, 92), (191, 87), (191, 89), (191, 90), (191, 92), (192, 87), (192, 89), (192, 90), (192, 92), (193, 86), (193, 87), (193, 88), (193, 89), (193, 90), (193, 91), (193, 92), (193, 93), (194, 86), (194, 88), (194, 89), (194, 90), (194, 91), (194, 93), (195, 86), (195, 88), (195, 89), (195, 90), (195, 91), (195, 93), (196, 86), (196, 88), (196, 89), (196, 90), (196, 91), (196, 92), (196, 94), (197, 86), (197, 88), (197, 89), (197, 90), (197, 91), (197, 92), (197, 93), (197, 95), (198, 87), (198, 89), (198, 90), (198, 91), (198, 92), (198, 93), (198, 94), (198, 97), (199, 90), (199, 91), (199, 92), (199, 93), (199, 94), (199, 95), (199, 97), (200, 88), (200, 90), (200, 91), (200, 92), (200, 93), (200, 94), (200, 95),
(200, 96), (200, 98), (201, 89), (201, 91), (201, 92), (201, 93), (201, 94), (201, 95), (201, 96), (201, 97), (201, 99), (201, 172), (201, 174), (202, 90), (202, 92), (202, 93), (202, 94), (202, 95), (202, 96), (202, 97), (202, 99), (202, 171), (202, 175), (203, 91), (203, 93), (203, 94), (203, 95), (203, 96), (203, 97), (203, 99), (203, 170), (203, 172), (203, 173), (203, 175), (204, 92), (204, 94), (204, 95), (204, 96), (204, 97), (204, 98), (204, 100), (204, 171), (204, 174), (204, 176), (205, 92), (205, 94), (205, 95), (205, 96), (205, 97), (205, 98), (205, 100), (205, 172), (205, 175), (205, 177), (206, 93), (206, 95), (206, 96), (206, 97), (206, 98), (206, 100), (206, 173), (206, 177), (207, 93), (207, 95), (207, 96), (207, 97), (207, 98), (207, 100), (207, 174), (207, 176), (208, 93), (208, 95),
(208, 96), (208, 97), (208, 98), (208, 100), (208, 175), (209, 93), (209, 95), (209, 96), (209, 97), (209, 98), (209, 100), (210, 93), (210, 94), (210, 95), (210, 96), (210, 97), (210, 98), (210, 99), (210, 101), (211, 94), (211, 96), (211, 97), (211, 98), (211, 99), (211, 101), (212, 94), (212, 96), (212, 97), (212, 98), (212, 99), (212, 101), (213, 94), (213, 96), (213, 97), (213, 98), (213, 99), (213, 101), (214, 94), (214, 96), (214, 97), (214, 98), (214, 99), (214, 101), (215, 96), (215, 97), (215, 98), (215, 99), (215, 101), (216, 95), (216, 97), (216, 98), (216, 99), (216, 101), (217, 96), (217, 98), (217, 99), (217, 101), (218, 97), (218, 99), (218, 100), (218, 102), (219, 98), (219, 100), (219, 102), (220, 99), (220, 102), (221, 100), (221, 102), (222, 101), (222, 102), (223, 101), (223, 102),
(224, 100), (224, 102), (225, 99), (225, 102), (226, 98), (226, 100), (226, 102), (227, 99), (227, 100), (227, 102), (228, 95), (228, 98), (228, 99), (228, 100), (228, 102), (229, 95), (229, 97), (229, 98), (229, 99), (229, 100), (229, 102), (230, 95), (230, 97), (230, 98), (230, 99), (230, 100), (230, 102), (231, 94), (231, 95), (231, 96), (231, 97), (231, 98), (231, 99), (231, 100), (231, 102), (232, 94), (232, 96), (232, 97), (232, 98), (232, 99), (232, 100), (232, 102), (233, 94), (233, 96), (233, 97), (233, 98), (233, 99), (233, 101), (234, 94), (234, 96), (234, 97), (234, 98), (234, 99), (234, 101), (235, 94), (235, 96), (235, 97), (235, 98), (235, 100), (236, 93), (236, 95), (236, 96), (236, 97), (236, 98), (236, 100), (237, 93), (237, 95), (237, 96), (237, 97), (237, 99), (238, 93), (238, 95),
(238, 96), (238, 97), (238, 99), (238, 172), (238, 175), (239, 93), (239, 95), (239, 96), (239, 98), (239, 171), (239, 176), (240, 93), (240, 95), (240, 96), (240, 98), (240, 170), (240, 172), (240, 173), (240, 174), (240, 175), (240, 177), (241, 92), (241, 94), (241, 95), (241, 97), (241, 170), (241, 172), (241, 173), (241, 177), (242, 92), (242, 94), (242, 95), (242, 97), (242, 170), (242, 175), (243, 92), (243, 94), (243, 96), (243, 171), (243, 172), (243, 173), (244, 92), (244, 94), (244, 96), (245, 91), (245, 93), (245, 95), (246, 90), (246, 92), (246, 93), (246, 95), (247, 89), (247, 91), (247, 92), (247, 93), (247, 95), (248, 88), (248, 90), (248, 91), (248, 92), (248, 94), (249, 87), (249, 89), (249, 90), (249, 91), (249, 92), (249, 94), (250, 86), (250, 88), (250, 89), (250, 90), (250, 91),
(250, 92), (250, 94), (251, 86), (251, 88), (251, 89), (251, 90), (251, 91), (251, 92), (251, 94), (252, 86), (252, 88), (252, 89), (252, 90), (252, 91), (252, 92), (252, 94), (253, 86), (253, 88), (253, 89), (253, 90), (253, 91), (253, 93), (254, 86), (254, 89), (254, 90), (254, 91), (254, 93), (255, 87), (255, 89), (255, 90), (255, 91), (255, 93), (256, 88), (256, 90), (256, 91), (256, 93), (257, 89), (257, 91), (257, 93), (258, 89), (258, 91), (258, 93), (259, 89), (259, 91), (259, 93), (260, 89), (260, 91), (260, 93), (261, 89), (261, 91), (261, 93), (262, 89), (262, 93), (263, 91), (263, 93), (264, 93), )
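# Coordinate group for hex colour #CCFFFF (a pale cyan); same assumed layout.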
coordinates_CCFFFF = ((99, 187),
(99, 189), (99, 190), (99, 191), (100, 185), (100, 192), (100, 193), (100, 194), (101, 183), (101, 184), (101, 187), (101, 188), (101, 189), (101, 190), (101, 191), (101, 195), (102, 182), (102, 185), (102, 186), (102, 187), (102, 188), (102, 189), (102, 190), (102, 191), (102, 192), (102, 193), (102, 194), (102, 197), (103, 182), (103, 184), (103, 185), (103, 186), (103, 187), (103, 188), (103, 189), (103, 190), (103, 191), (103, 192), (103, 193), (103, 194), (103, 195), (103, 198), (104, 182), (104, 184), (104, 185), (104, 186), (104, 187), (104, 188), (104, 189), (104, 190), (104, 191), (104, 192), (104, 193), (104, 194), (104, 195), (104, 196), (104, 197), (104, 199), (105, 182), (105, 184), (105, 185), (105, 186), (105, 187), (105, 188), (105, 189), (105, 190), (105, 191), (105, 192), (105, 193), (105, 194), (105, 195), (105, 196), (105, 197),
(105, 198), (105, 201), (106, 181), (106, 183), (106, 184), (106, 185), (106, 186), (106, 187), (106, 188), (106, 189), (106, 190), (106, 191), (106, 192), (106, 193), (106, 194), (106, 195), (106, 196), (106, 197), (106, 198), (106, 199), (106, 202), (107, 181), (107, 183), (107, 184), (107, 185), (107, 186), (107, 187), (107, 188), (107, 189), (107, 190), (107, 191), (107, 192), (107, 193), (107, 194), (107, 195), (107, 196), (107, 197), (107, 198), (107, 199), (107, 200), (107, 203), (108, 180), (108, 182), (108, 183), (108, 184), (108, 185), (108, 186), (108, 187), (108, 188), (108, 189), (108, 190), (108, 191), (108, 192), (108, 193), (108, 194), (108, 195), (108, 196), (108, 197), (108, 198), (108, 199), (108, 200), (108, 201), (108, 204), (109, 180), (109, 182), (109, 183), (109, 184), (109, 185), (109, 186), (109, 187), (109, 188), (109, 189),
(109, 190), (109, 191), (109, 192), (109, 193), (109, 194), (109, 195), (109, 196), (109, 197), (109, 198), (109, 199), (109, 200), (109, 201), (109, 202), (109, 205), (110, 179), (110, 181), (110, 182), (110, 183), (110, 184), (110, 185), (110, 186), (110, 187), (110, 188), (110, 189), (110, 190), (110, 191), (110, 192), (110, 193), (110, 194), (110, 195), (110, 196), (110, 197), (110, 198), (110, 199), (110, 200), (110, 201), (110, 202), (110, 203), (111, 179), (111, 181), (111, 182), (111, 183), (111, 184), (111, 185), (111, 186), (111, 187), (111, 188), (111, 189), (111, 190), (111, 191), (111, 192), (111, 193), (111, 194), (111, 195), (111, 196), (111, 197), (111, 198), (111, 199), (111, 200), (111, 201), (111, 202), (111, 203), (111, 204), (111, 206), (112, 178), (112, 180), (112, 181), (112, 182), (112, 183), (112, 184), (112, 185), (112, 186),
(112, 187), (112, 188), (112, 189), (112, 190), (112, 191), (112, 192), (112, 193), (112, 194), (112, 195), (112, 196), (112, 197), (112, 198), (112, 199), (112, 200), (112, 201), (112, 202), (112, 203), (112, 204), (112, 205), (112, 207), (113, 177), (113, 179), (113, 180), (113, 181), (113, 182), (113, 183), (113, 184), (113, 185), (113, 186), (113, 187), (113, 188), (113, 189), (113, 190), (113, 191), (113, 192), (113, 193), (113, 194), (113, 195), (113, 196), (113, 197), (113, 198), (113, 199), (113, 200), (113, 201), (113, 202), (113, 203), (113, 204), (113, 205), (113, 206), (113, 208), (114, 176), (114, 178), (114, 179), (114, 180), (114, 181), (114, 182), (114, 183), (114, 184), (114, 185), (114, 186), (114, 187), (114, 188), (114, 189), (114, 190), (114, 191), (114, 192), (114, 193), (114, 194), (114, 195), (114, 196), (114, 197), (114, 198),
(114, 199), (114, 200), (114, 201), (114, 202), (114, 203), (114, 204), (114, 205), (114, 206), (114, 207), (114, 209), (115, 176), (115, 178), (115, 179), (115, 180), (115, 181), (115, 182), (115, 183), (115, 184), (115, 185), (115, 186), (115, 187), (115, 188), (115, 189), (115, 190), (115, 191), (115, 192), (115, 193), (115, 194), (115, 195), (115, 196), (115, 197), (115, 198), (115, 199), (115, 200), (115, 201), (115, 202), (115, 203), (115, 204), (115, 205), (115, 206), (115, 207), (115, 209), (116, 175), (116, 177), (116, 178), (116, 179), (116, 180), (116, 181), (116, 182), (116, 183), (116, 184), (116, 185), (116, 186), (116, 187), (116, 188), (116, 189), (116, 190), (116, 191), (116, 192), (116, 193), (116, 194), (116, 195), (116, 196), (116, 197), (116, 198), (116, 199), (116, 200), (116, 201), (116, 202), (116, 203), (116, 204), (116, 205),
(116, 206), (116, 207), (116, 208), (116, 210), (117, 175), (117, 177), (117, 178), (117, 179), (117, 180), (117, 181), (117, 182), (117, 183), (117, 184), (117, 185), (117, 186), (117, 187), (117, 188), (117, 189), (117, 190), (117, 191), (117, 192), (117, 193), (117, 194), (117, 195), (117, 196), (117, 197), (117, 198), (117, 199), (117, 200), (117, 201), (117, 202), (117, 203), (117, 204), (117, 205), (117, 206), (117, 207), (117, 208), (117, 209), (117, 211), (118, 174), (118, 176), (118, 177), (118, 178), (118, 179), (118, 180), (118, 181), (118, 182), (118, 183), (118, 184), (118, 185), (118, 186), (118, 187), (118, 188), (118, 189), (118, 190), (118, 191), (118, 192), (118, 193), (118, 194), (118, 195), (118, 196), (118, 197), (118, 198), (118, 199), (118, 200), (118, 201), (118, 202), (118, 203), (118, 204), (118, 205), (118, 206), (118, 207),
(118, 208), (118, 209), (118, 211), (119, 174), (119, 176), (119, 177), (119, 178), (119, 179), (119, 180), (119, 181), (119, 182), (119, 183), (119, 184), (119, 185), (119, 186), (119, 187), (119, 188), (119, 189), (119, 190), (119, 191), (119, 192), (119, 193), (119, 194), (119, 195), (119, 196), (119, 197), (119, 198), (119, 199), (119, 200), (119, 201), (119, 202), (119, 203), (119, 204), (119, 205), (119, 206), (119, 207), (119, 208), (119, 209), (119, 211), (120, 173), (120, 176), (120, 177), (120, 178), (120, 179), (120, 180), (120, 181), (120, 182), (120, 183), (120, 184), (120, 185), (120, 186), (120, 187), (120, 188), (120, 189), (120, 190), (120, 191), (120, 192), (120, 193), (120, 194), (120, 195), (120, 196), (120, 197), (120, 198), (120, 199), (120, 200), (120, 201), (120, 202), (120, 203), (120, 204), (120, 205), (120, 206), (120, 207),
(120, 208), (120, 209), (120, 211), (121, 173), (121, 175), (121, 180), (121, 181), (121, 182), (121, 183), (121, 184), (121, 185), (121, 186), (121, 187), (121, 188), (121, 189), (121, 190), (121, 191), (121, 192), (121, 193), (121, 194), (121, 195), (121, 196), (121, 197), (121, 198), (121, 199), (121, 200), (121, 201), (121, 202), (121, 203), (121, 204), (121, 205), (121, 206), (121, 207), (121, 208), (121, 209), (121, 211), (122, 176), (122, 177), (122, 178), (122, 179), (122, 180), (122, 184), (122, 185), (122, 186), (122, 187), (122, 188), (122, 189), (122, 190), (122, 191), (122, 192), (122, 193), (122, 194), (122, 195), (122, 196), (122, 197), (122, 198), (122, 199), (122, 200), (122, 201), (122, 202), (122, 203), (122, 204), (122, 205), (122, 206), (122, 207), (122, 208), (122, 209), (122, 211), (123, 181), (123, 182), (123, 183), (123, 187),
(123, 188), (123, 189), (123, 190), (123, 191), (123, 192), (123, 193), (123, 194), (123, 195), (123, 196), (123, 197), (123, 198), (123, 199), (123, 200), (123, 201), (123, 202), (123, 203), (123, 204), (123, 205), (123, 206), (123, 207), (123, 208), (123, 209), (123, 211), (124, 184), (124, 186), (124, 189), (124, 190), (124, 191), (124, 192), (124, 193), (124, 194), (124, 195), (124, 196), (124, 197), (124, 198), (124, 199), (124, 200), (124, 201), (124, 202), (124, 203), (124, 204), (124, 205), (124, 206), (124, 207), (124, 208), (124, 209), (124, 211), (125, 187), (125, 188), (125, 191), (125, 192), (125, 193), (125, 194), (125, 195), (125, 196), (125, 197), (125, 198), (125, 199), (125, 200), (125, 201), (125, 202), (125, 203), (125, 204), (125, 205), (125, 206), (125, 207), (125, 208), (125, 209), (125, 211), (126, 189), (126, 190), (126, 193),
(126, 194), (126, 195), (126, 196), (126, 197), (126, 198), (126, 199), (126, 200), (126, 201), (126, 202), (126, 203), (126, 204), (126, 205), (126, 206), (126, 207), (126, 208), (126, 209), (126, 211), (127, 192), (127, 195), (127, 196), (127, 197), (127, 198), (127, 199), (127, 200), (127, 201), (127, 202), (127, 203), (127, 204), (127, 205), (127, 206), (127, 207), (127, 208), (127, 209), (127, 211), (128, 193), (128, 195), (128, 196), (128, 197), (128, 198), (128, 199), (128, 200), (128, 201), (128, 202), (128, 203), (128, 204), (128, 205), (128, 206), (128, 207), (128, 208), (128, 209), (128, 211), (129, 195), (129, 197), (129, 198), (129, 199), (129, 200), (129, 201), (129, 202), (129, 203), (129, 204), (129, 205), (129, 206), (129, 207), (129, 208), (129, 209), (129, 211), (130, 195), (130, 197), (130, 198), (130, 199), (130, 200), (130, 201),
(130, 202), (130, 203), (130, 204), (130, 205), (130, 206), (130, 207), (130, 208), (130, 209), (130, 211), (131, 195), (131, 197), (131, 198), (131, 199), (131, 200), (131, 201), (131, 202), (131, 203), (131, 204), (131, 205), (131, 206), (131, 207), (131, 208), (131, 209), (131, 210), (131, 212), (132, 192), (132, 194), (132, 195), (132, 196), (132, 197), (132, 198), (132, 199), (132, 200), (132, 201), (132, 202), (132, 203), (132, 204), (132, 205), (132, 206), (132, 207), (132, 208), (132, 209), (132, 210), (132, 211), (132, 213), (133, 192), (133, 195), (133, 196), (133, 197), (133, 198), (133, 199), (133, 200), (133, 201), (133, 202), (133, 203), (133, 204), (133, 205), (133, 206), (133, 207), (133, 208), (133, 209), (133, 210), (133, 211), (133, 212), (133, 214), (134, 192), (134, 194), (134, 195), (134, 196), (134, 197), (134, 198), (134, 199),
(134, 200), (134, 201), (134, 202), (134, 203), (134, 204), (134, 205), (134, 206), (134, 207), (134, 208), (134, 209), (134, 210), (134, 211), (134, 212), (134, 214), (135, 192), (135, 194), (135, 195), (135, 196), (135, 197), (135, 198), (135, 199), (135, 200), (135, 201), (135, 202), (135, 203), (135, 204), (135, 205), (135, 206), (135, 207), (135, 208), (135, 209), (135, 210), (135, 211), (135, 212), (135, 213), (135, 215), (136, 192), (136, 194), (136, 195), (136, 196), (136, 197), (136, 198), (136, 199), (136, 200), (136, 201), (136, 202), (136, 203), (136, 204), (136, 205), (136, 206), (136, 207), (136, 208), (136, 209), (136, 210), (136, 211), (136, 212), (136, 213), (136, 215), (137, 193), (137, 195), (137, 196), (137, 197), (137, 198), (137, 199), (137, 200), (137, 201), (137, 202), (137, 203), (137, 204), (137, 205), (137, 206), (137, 207),
(137, 208), (137, 209), (137, 210), (137, 211), (137, 212), (137, 213), (137, 215), (138, 193), (138, 195), (138, 196), (138, 197), (138, 198), (138, 199), (138, 200), (138, 201), (138, 202), (138, 203), (138, 204), (138, 205), (138, 206), (138, 207), (138, 208), (138, 209), (138, 210), (138, 211), (138, 212), (138, 213), (138, 215), (139, 193), (139, 195), (139, 196), (139, 197), (139, 198), (139, 199), (139, 200), (139, 201), (139, 202), (139, 203), (139, 204), (139, 205), (139, 206), (139, 207), (139, 208), (139, 209), (139, 210), (139, 211), (139, 212), (139, 214), (140, 193), (140, 195), (140, 196), (140, 197), (140, 198), (140, 199), (140, 200), (140, 201), (140, 202), (140, 203), (140, 204), (140, 205), (140, 206), (140, 207), (140, 208), (140, 209), (140, 210), (140, 211), (140, 212), (140, 214), (141, 194), (141, 196), (141, 197), (141, 198),
(141, 199), (141, 200), (141, 201), (141, 202), (141, 203), (141, 204), (141, 205), (141, 206), (141, 207), (141, 208), (141, 209), (141, 210), (141, 211), (141, 213), (142, 194), (142, 196), (142, 197), (142, 198), (142, 199), (142, 200), (142, 201), (142, 202), (142, 203), (142, 204), (142, 205), (142, 206), (142, 207), (142, 208), (142, 209), (142, 210), (142, 211), (142, 213), (143, 194), (143, 195), (143, 196), (143, 197), (143, 198), (143, 199), (143, 200), (143, 201), (143, 202), (143, 203), (143, 204), (143, 205), (143, 206), (143, 207), (143, 208), (143, 209), (143, 210), (143, 211), (143, 213), (144, 195), (144, 197), (144, 198), (144, 199), (144, 200), (144, 201), (144, 202), (144, 203), (144, 204), (144, 205), (144, 206), (144, 207), (144, 208), (144, 209), (144, 210), (144, 211), (144, 213), (145, 195), (145, 197), (145, 198), (145, 199),
(145, 200), (145, 201), (145, 202), (145, 203), (145, 204), (145, 205), (145, 206), (145, 207), (145, 208), (145, 209), (145, 210), (145, 212), (146, 196), (146, 198), (146, 199), (146, 200), (146, 201), (146, 202), (146, 203), (146, 204), (146, 205), (146, 206), (146, 207), (146, 208), (146, 209), (146, 210), (146, 212), (147, 196), (147, 198), (147, 199), (147, 200), (147, 201), (147, 202), (147, 203), (147, 204), (147, 205), (147, 206), (147, 207), (147, 208), (147, 209), (147, 212), (148, 197), (148, 199), (148, 200), (148, 201), (148, 202), (148, 203), (148, 204), (148, 205), (148, 206), (148, 207), (148, 210), (149, 197), (149, 199), (149, 200), (149, 201), (149, 202), (149, 203), (149, 204), (149, 205), (149, 206), (149, 209), (150, 198), (150, 200), (150, 201), (150, 202), (150, 203), (150, 204), (150, 207), (151, 198), (151, 200), (151, 201),
(151, 202), (151, 206), (152, 198), (152, 200), (152, 201), (152, 202), (152, 204), (153, 198), (153, 200), (153, 201), (153, 202), (154, 198), (154, 200), (154, 202), (155, 198), (155, 201), (156, 198), (156, 201), (157, 198), (157, 201), (158, 198), (158, 201), (159, 198), (159, 201), (160, 198), (160, 201), (161, 198), (161, 200), (162, 198), (162, 200), (163, 198), (163, 200), (164, 198), (164, 200), (165, 198), (165, 200), (166, 198), (166, 199), (167, 198), (167, 199), (168, 198), (169, 198), (270, 192), (270, 194), (271, 192), (271, 196), (272, 192), (272, 194), (272, 197), (273, 193), (273, 195), (273, 197), (274, 194), (274, 196), (274, 198), (275, 194), (275, 196), (275, 198), (276, 195), (276, 197), (276, 199), (277, 195), (277, 197), (277, 199), (278, 195), (278, 197), (278, 199), (279, 195), (279, 197), (279, 199), (280, 195), (280, 197),
(280, 199), (281, 195), (281, 197), (281, 198), (281, 200), (282, 195), (282, 197), (282, 198), (282, 200), (283, 195), (283, 197), (283, 198), (283, 200), (284, 195), (284, 197), (284, 198), (284, 200), (285, 195), (285, 197), (285, 198), (285, 200), (286, 195), (286, 197), (286, 198), (286, 200), (287, 195), (287, 197), (287, 198), (287, 200), (288, 195), (288, 197), (288, 198), (288, 200), (289, 195), (289, 197), (289, 198), (289, 199), (289, 200), (289, 201), (290, 195), (290, 197), (290, 198), (290, 199), (290, 201), (291, 195), (291, 197), (291, 198), (291, 199), (291, 201), (292, 195), (292, 197), (292, 198), (292, 199), (292, 201), (293, 195), (293, 197), (293, 198), (293, 199), (293, 200), (293, 202), (294, 195), (294, 197), (294, 198), (294, 199), (294, 200), (294, 202), (295, 195), (295, 197), (295, 198), (295, 199), (295, 200), (295, 201),
(295, 203), (296, 195), (296, 197), (296, 198), (296, 199), (296, 200), (296, 201), (296, 203), (297, 195), (297, 197), (297, 198), (297, 199), (297, 200), (297, 201), (297, 202), (297, 204), (298, 195), (298, 197), (298, 198), (298, 199), (298, 200), (298, 201), (298, 202), (298, 204), (299, 195), (299, 197), (299, 198), (299, 199), (299, 200), (299, 201), (299, 202), (299, 203), (299, 205), (300, 195), (300, 197), (300, 198), (300, 199), (300, 200), (300, 201), (300, 202), (300, 203), (300, 204), (300, 206), (301, 195), (301, 197), (301, 198), (301, 199), (301, 200), (301, 201), (301, 202), (301, 203), (301, 204), (301, 205), (301, 207), (302, 195), (302, 197), (302, 198), (302, 199), (302, 200), (302, 201), (302, 202), (302, 203), (302, 204), (302, 205), (303, 195), (303, 197), (303, 198), (303, 199), (303, 200), (303, 201), (303, 202), (303, 203),
(303, 204), (303, 205), (303, 206), (303, 208), (304, 195), (304, 197), (304, 198), (304, 199), (304, 200), (304, 201), (304, 202), (304, 203), (304, 204), (304, 205), (304, 206), (304, 207), (304, 209), (305, 195), (305, 197), (305, 198), (305, 199), (305, 200), (305, 201), (305, 202), (305, 203), (305, 204), (305, 205), (305, 206), (305, 207), (305, 208), (305, 210), (306, 195), (306, 197), (306, 198), (306, 199), (306, 200), (306, 201), (306, 202), (306, 203), (306, 204), (306, 205), (306, 206), (306, 207), (306, 208), (306, 209), (306, 211), (307, 195), (307, 197), (307, 198), (307, 199), (307, 200), (307, 201), (307, 202), (307, 203), (307, 204), (307, 205), (307, 206), (307, 207), (307, 208), (307, 209), (307, 211), (308, 195), (308, 197), (308, 198), (308, 199), (308, 200), (308, 201), (308, 202), (308, 203), (308, 204), (308, 205), (308, 206),
(308, 207), (308, 208), (308, 209), (308, 210), (308, 212), (309, 195), (309, 197), (309, 198), (309, 199), (309, 200), (309, 201), (309, 202), (309, 203), (309, 204), (309, 205), (309, 206), (309, 207), (309, 208), (309, 209), (309, 210), (309, 211), (309, 213), (310, 195), (310, 197), (310, 198), (310, 199), (310, 200), (310, 201), (310, 202), (310, 203), (310, 204), (310, 205), (310, 206), (310, 207), (310, 208), (310, 209), (310, 210), (310, 211), (310, 213), (311, 194), (311, 196), (311, 197), (311, 198), (311, 199), (311, 200), (311, 201), (311, 202), (311, 203), (311, 204), (311, 205), (311, 206), (311, 207), (311, 208), (311, 209), (311, 210), (311, 211), (311, 212), (311, 214), (312, 194), (312, 196), (312, 197), (312, 198), (312, 199), (312, 200), (312, 201), (312, 202), (312, 203), (312, 204), (312, 205), (312, 206), (312, 207), (312, 208),
(312, 209), (312, 210), (312, 211), (312, 212), (312, 214), (313, 194), (313, 196), (313, 197), (313, 198), (313, 199), (313, 200), (313, 201), (313, 202), (313, 203), (313, 204), (313, 205), (313, 206), (313, 207), (313, 208), (313, 209), (313, 210), (313, 211), (313, 212), (313, 214), (314, 193), (314, 195), (314, 196), (314, 197), (314, 198), (314, 199), (314, 200), (314, 201), (314, 202), (314, 203), (314, 204), (314, 205), (314, 206), (314, 207), (314, 208), (314, 209), (314, 210), (314, 211), (314, 213), (315, 193), (315, 195), (315, 196), (315, 197), (315, 198), (315, 199), (315, 200), (315, 201), (315, 202), (315, 203), (315, 204), (315, 205), (315, 206), (315, 207), (315, 208), (315, 209), (315, 210), (315, 211), (315, 213), (316, 192), (316, 194), (316, 195), (316, 196), (316, 197), (316, 198), (316, 199), (316, 200), (316, 201), (316, 202),
(316, 203), (316, 204), (316, 205), (316, 206), (316, 207), (316, 208), (316, 209), (316, 210), (316, 212), (317, 192), (317, 194), (317, 195), (317, 196), (317, 197), (317, 198), (317, 199), (317, 200), (317, 201), (317, 202), (317, 203), (317, 204), (317, 205), (317, 206), (317, 207), (317, 208), (317, 209), (317, 210), (317, 212), (318, 191), (318, 193), (318, 194), (318, 195), (318, 196), (318, 197), (318, 198), (318, 199), (318, 200), (318, 201), (318, 202), (318, 203), (318, 204), (318, 205), (318, 206), (318, 207), (318, 208), (318, 209), (318, 211), (319, 190), (319, 192), (319, 193), (319, 194), (319, 195), (319, 196), (319, 197), (319, 198), (319, 199), (319, 200), (319, 201), (319, 202), (319, 203), (319, 204), (319, 205), (319, 206), (319, 207), (319, 208), (319, 209), (319, 211), (320, 189), (320, 191), (320, 192), (320, 193), (320, 194),
(320, 195), (320, 196), (320, 197), (320, 198), (320, 199), (320, 200), (320, 201), (320, 202), (320, 203), (320, 204), (320, 205), (320, 206), (320, 207), (320, 208), (320, 209), (320, 211), (321, 188), (321, 190), (321, 191), (321, 192), (321, 193), (321, 194), (321, 195), (321, 196), (321, 197), (321, 198), (321, 199), (321, 200), (321, 201), (321, 202), (321, 203), (321, 204), (321, 205), (321, 206), (321, 207), (321, 208), (321, 209), (321, 211), (322, 186), (322, 189), (322, 190), (322, 191), (322, 192), (322, 193), (322, 194), (322, 195), (322, 196), (322, 197), (322, 198), (322, 199), (322, 200), (322, 201), (322, 202), (322, 203), (322, 204), (322, 205), (322, 206), (322, 207), (322, 208), (322, 209), (322, 211), (323, 185), (323, 188), (323, 189), (323, 190), (323, 191), (323, 192), (323, 193), (323, 194), (323, 195), (323, 196), (323, 197),
(323, 198), (323, 199), (323, 200), (323, 201), (323, 202), (323, 203), (323, 204), (323, 205), (323, 206), (323, 207), (323, 208), (323, 209), (323, 211), (324, 182), (324, 183), (324, 186), (324, 187), (324, 188), (324, 189), (324, 190), (324, 191), (324, 192), (324, 193), (324, 194), (324, 195), (324, 196), (324, 197), (324, 198), (324, 199), (324, 200), (324, 201), (324, 202), (324, 203), (324, 204), (324, 205), (324, 206), (324, 207), (324, 208), (324, 209), (324, 211), (325, 179), (325, 180), (325, 181), (325, 184), (325, 185), (325, 186), (325, 187), (325, 188), (325, 189), (325, 190), (325, 191), (325, 192), (325, 193), (325, 194), (325, 195), (325, 196), (325, 197), (325, 198), (325, 199), (325, 200), (325, 201), (325, 202), (325, 203), (325, 204), (325, 205), (325, 206), (325, 207), (325, 208), (325, 209), (325, 211), (326, 173), (326, 175),
(326, 176), (326, 177), (326, 178), (326, 182), (326, 183), (326, 184), (326, 185), (326, 186), (326, 187), (326, 188), (326, 189), (326, 190), (326, 191), (326, 192), (326, 193), (326, 194), (326, 195), (326, 196), (326, 197), (326, 198), (326, 199), (326, 200), (326, 201), (326, 202), (326, 203), (326, 204), (326, 205), (326, 206), (326, 207), (326, 208), (326, 210), (327, 173), (327, 179), (327, 180), (327, 181), (327, 182), (327, 183), (327, 184), (327, 185), (327, 186), (327, 187), (327, 188), (327, 189), (327, 190), (327, 191), (327, 192), (327, 193), (327, 194), (327, 195), (327, 196), (327, 197), (327, 198), (327, 199), (327, 200), (327, 201), (327, 202), (327, 203), (327, 204), (327, 205), (327, 206), (327, 207), (327, 208), (327, 210), (328, 174), (328, 176), (328, 177), (328, 178), (328, 179), (328, 180), (328, 181), (328, 182), (328, 183),
(328, 184), (328, 185), (328, 186), (328, 187), (328, 188), (328, 189), (328, 190), (328, 191), (328, 192), (328, 193), (328, 194), (328, 195), (328, 196), (328, 197), (328, 198), (328, 199), (328, 200), (328, 201), (328, 202), (328, 203), (328, 204), (328, 205), (328, 206), (328, 207), (328, 209), (329, 174), (329, 176), (329, 177), (329, 178), (329, 179), (329, 180), (329, 181), (329, 182), (329, 183), (329, 184), (329, 185), (329, 186), (329, 187), (329, 188), (329, 189), (329, 190), (329, 191), (329, 192), (329, 193), (329, 194), (329, 195), (329, 196), (329, 197), (329, 198), (329, 199), (329, 200), (329, 201), (329, 202), (329, 203), (329, 204), (329, 205), (329, 206), (329, 208), (330, 175), (330, 177), (330, 178), (330, 179), (330, 180), (330, 181), (330, 182), (330, 183), (330, 184), (330, 185), (330, 186), (330, 187), (330, 188), (330, 189),
(330, 190), (330, 191), (330, 192), (330, 193), (330, 194), (330, 195), (330, 196), (330, 197), (330, 198), (330, 199), (330, 200), (330, 201), (330, 202), (330, 203), (330, 204), (330, 205), (330, 206), (330, 208), (331, 175), (331, 177), (331, 178), (331, 179), (331, 180), (331, 181), (331, 182), (331, 183), (331, 184), (331, 185), (331, 186), (331, 187), (331, 188), (331, 189), (331, 190), (331, 191), (331, 192), (331, 193), (331, 194), (331, 195), (331, 196), (331, 197), (331, 198), (331, 199), (331, 200), (331, 201), (331, 202), (331, 203), (331, 204), (331, 205), (331, 207), (332, 176), (332, 178), (332, 179), (332, 180), (332, 181), (332, 182), (332, 183), (332, 184), (332, 185), (332, 186), (332, 187), (332, 188), (332, 189), (332, 190), (332, 191), (332, 192), (332, 193), (332, 194), (332, 195), (332, 196), (332, 197), (332, 198), (332, 199),
(332, 200), (332, 201), (332, 202), (332, 203), (332, 204), (332, 205), (332, 207), (333, 177), (333, 179), (333, 180), (333, 181), (333, 182), (333, 183), (333, 184), (333, 185), (333, 186), (333, 187), (333, 188), (333, 189), (333, 190), (333, 191), (333, 192), (333, 193), (333, 194), (333, 195), (333, 196), (333, 197), (333, 198), (333, 199), (333, 200), (333, 201), (333, 202), (333, 203), (333, 204), (333, 206), (334, 178), (334, 180), (334, 181), (334, 182), (334, 183), (334, 184), (334, 185), (334, 186), (334, 187), (334, 188), (334, 189), (334, 190), (334, 191), (334, 192), (334, 193), (334, 194), (334, 195), (334, 196), (334, 197), (334, 198), (334, 199), (334, 200), (334, 201), (334, 202), (334, 203), (334, 205), (335, 179), (335, 181), (335, 182), (335, 183), (335, 184), (335, 185), (335, 186), (335, 187), (335, 188), (335, 189), (335, 190),
(335, 191), (335, 192), (335, 193), (335, 194), (335, 195), (335, 196), (335, 197), (335, 198), (335, 199), (335, 200), (335, 201), (335, 202), (336, 181), (336, 182), (336, 183), (336, 184), (336, 185), (336, 186), (336, 187), (336, 188), (336, 189), (336, 190), (336, 191), (336, 192), (336, 193), (336, 194), (336, 195), (336, 196), (336, 197), (336, 198), (336, 199), (336, 200), (336, 201), (336, 204), (337, 180), (337, 182), (337, 183), (337, 184), (337, 185), (337, 186), (337, 187), (337, 188), (337, 189), (337, 190), (337, 191), (337, 192), (337, 193), (337, 194), (337, 195), (337, 196), (337, 197), (337, 198), (337, 199), (337, 200), (337, 203), (338, 181), (338, 183), (338, 184), (338, 185), (338, 186), (338, 187), (338, 188), (338, 189), (338, 190), (338, 191), (338, 192), (338, 193), (338, 194), (338, 195), (338, 196), (338, 197), (338, 198),
(338, 201), (339, 181), (339, 183), (339, 184), (339, 185), (339, 186), (339, 187), (339, 188), (339, 189), (339, 190), (339, 191), (339, 192), (339, 193), (339, 194), (339, 195), (339, 196), (339, 200), (340, 182), (340, 184), (340, 185), (340, 186), (340, 187), (340, 188), (340, 189), (340, 190), (340, 191), (340, 198), (341, 182), (341, 191), (341, 192), (341, 193), (341, 194), (341, 195), (342, 182), (342, 184), (342, 185), (342, 186), (342, 187), (342, 188), (342, 189), (342, 190), )
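# A minimal rendering sketch (added for illustration; not part of the original
# generated data). It assumes each pair is an (x, y) pixel position and that the
# group names encode hex colours; Pillow is an assumed third-party dependency,
# and the helper name below is hypothetical.
def _render_coordinate_groups(groups, background=(255, 255, 255)):
    """Composite colour-keyed coordinate tuples, e.g.
    {'CC0000': coordinates_CC0000, ...}, into a Pillow image."""
    from PIL import Image  # imported lazily so the data module itself has no dependency
    width = 1 + max(x for pts in groups.values() for x, _ in pts)
    height = 1 + max(y for pts in groups.values() for _, y in pts)
    img = Image.new('RGB', (width, height), background)
    for hex_colour, points in groups.items():
        # '#CC0000' style name -> (204, 0, 0) RGB triple
        rgb = tuple(int(hex_colour[i:i + 2], 16) for i in (0, 2, 4))
        for x, y in points:
            img.putpixel((x, y), rgb)  # paint each listed pixel in its colour
    return img

# Coordinate group for hex colour #996666 (a muted rose/brown); same assumed
# layout as the groups above.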
coordinates_996666 = ((162, 121),
(162, 123), (162, 124), (162, 125), (162, 126), (162, 127), (162, 128), (162, 129), (162, 130), (162, 131), (162, 132), (162, 133), (162, 134), (162, 135), (162, 136), (162, 137), (162, 139), (163, 109), (163, 112), (163, 120), (163, 140), (164, 108), (164, 113), (164, 114), (164, 115), (164, 119), (164, 121), (164, 122), (164, 123), (164, 124), (164, 125), (164, 126), (164, 127), (164, 128), (164, 129), (164, 130), (164, 131), (164, 132), (164, 133), (164, 134), (164, 135), (164, 136), (164, 137), (164, 138), (164, 139), (164, 141), (165, 108), (165, 110), (165, 111), (165, 112), (165, 116), (165, 117), (165, 120), (165, 121), (165, 122), (165, 123), (165, 124), (165, 125), (165, 126), (165, 127), (165, 128), (165, 129), (165, 130), (165, 131), (165, 132), (165, 133), (165, 134), (165, 135), (165, 136), (165, 137), (165, 138), (165, 139), (165, 141),
(166, 108), (166, 110), (166, 111), (166, 112), (166, 113), (166, 114), (166, 115), (166, 119), (166, 120), (166, 121), (166, 122), (166, 123), (166, 124), (166, 125), (166, 126), (166, 127), (166, 128), (166, 129), (166, 130), (166, 131), (166, 132), (166, 133), (166, 134), (166, 135), (166, 136), (166, 137), (166, 138), (166, 139), (166, 140), (166, 142), (167, 108), (167, 110), (167, 111), (167, 112), (167, 113), (167, 114), (167, 115), (167, 116), (167, 117), (167, 118), (167, 119), (167, 120), (167, 121), (167, 122), (167, 123), (167, 124), (167, 125), (167, 126), (167, 127), (167, 128), (167, 129), (167, 130), (167, 131), (167, 132), (167, 133), (167, 134), (167, 135), (167, 136), (167, 137), (167, 138), (167, 139), (167, 140), (167, 141), (167, 142), (168, 108), (168, 116), (168, 117), (168, 118), (168, 119), (168, 120), (168, 121), (168, 122),
(168, 123), (168, 124), (168, 125), (168, 126), (168, 127), (168, 128), (168, 129), (168, 130), (168, 131), (168, 132), (168, 133), (168, 134), (168, 135), (168, 136), (168, 137), (168, 138), (168, 139), (168, 140), (168, 141), (168, 143), (169, 108), (169, 110), (169, 111), (169, 112), (169, 113), (169, 114), (169, 115), (169, 117), (169, 118), (169, 119), (169, 120), (169, 121), (169, 122), (169, 123), (169, 124), (169, 125), (169, 126), (169, 127), (169, 128), (169, 129), (169, 130), (169, 131), (169, 132), (169, 133), (169, 134), (169, 135), (169, 136), (169, 137), (169, 138), (169, 139), (169, 140), (169, 141), (169, 142), (169, 144), (170, 116), (170, 118), (170, 119), (170, 120), (170, 121), (170, 122), (170, 123), (170, 124), (170, 125), (170, 126), (170, 127), (170, 128), (170, 129), (170, 130), (170, 131), (170, 132), (170, 133), (170, 134),
(170, 135), (170, 136), (170, 137), (170, 138), (170, 139), (170, 140), (170, 141), (170, 142), (170, 144), (171, 117), (171, 119), (171, 120), (171, 121), (171, 122), (171, 123), (171, 124), (171, 125), (171, 126), (171, 127), (171, 128), (171, 129), (171, 130), (171, 131), (171, 132), (171, 133), (171, 134), (171, 135), (171, 136), (171, 137), (171, 138), (171, 139), (171, 140), (171, 141), (171, 142), (171, 143), (171, 144), (171, 145), (172, 117), (172, 119), (172, 120), (172, 121), (172, 122), (172, 123), (172, 124), (172, 125), (172, 126), (172, 127), (172, 128), (172, 129), (172, 130), (172, 131), (172, 132), (172, 133), (172, 134), (172, 135), (172, 136), (172, 137), (172, 138), (172, 139), (172, 140), (172, 141), (172, 142), (172, 143), (172, 145), (173, 116), (173, 118), (173, 119), (173, 120), (173, 121), (173, 122), (173, 123), (173, 124),
(173, 125), (173, 126), (173, 127), (173, 128), (173, 129), (173, 130), (173, 131), (173, 132), (173, 133), (173, 134), (173, 135), (173, 136), (173, 137), (173, 138), (173, 139), (173, 140), (173, 141), (173, 142), (173, 143), (173, 145), (174, 116), (174, 118), (174, 119), (174, 120), (174, 121), (174, 122), (174, 123), (174, 124), (174, 125), (174, 126), (174, 127), (174, 128), (174, 129), (174, 130), (174, 131), (174, 132), (174, 133), (174, 134), (174, 135), (174, 136), (174, 137), (174, 138), (174, 139), (174, 140), (174, 141), (174, 142), (174, 143), (174, 144), (174, 146), (175, 115), (175, 117), (175, 118), (175, 119), (175, 120), (175, 121), (175, 122), (175, 123), (175, 124), (175, 125), (175, 126), (175, 127), (175, 128), (175, 129), (175, 130), (175, 131), (175, 132), (175, 133), (175, 134), (175, 135), (175, 136), (175, 137), (175, 138),
(175, 139), (175, 140), (175, 141), (175, 142), (175, 143), (175, 144), (175, 146), (176, 114), (176, 116), (176, 117), (176, 118), (176, 119), (176, 120), (176, 121), (176, 122), (176, 123), (176, 124), (176, 125), (176, 126), (176, 127), (176, 128), (176, 129), (176, 130), (176, 131), (176, 132), (176, 133), (176, 134), (176, 135), (176, 136), (176, 137), (176, 138), (176, 139), (176, 140), (176, 141), (176, 142), (176, 143), (176, 144), (176, 145), (176, 147), (177, 113), (177, 115), (177, 116), (177, 117), (177, 118), (177, 119), (177, 120), (177, 121), (177, 122), (177, 123), (177, 124), (177, 125), (177, 126), (177, 127), (177, 128), (177, 129), (177, 130), (177, 131), (177, 132), (177, 133), (177, 134), (177, 135), (177, 136), (177, 137), (177, 138), (177, 139), (177, 140), (177, 141), (177, 142), (177, 143), (177, 144), (177, 145), (177, 147),
(178, 112), (178, 115), (178, 116), (178, 117), (178, 118), (178, 119), (178, 120), (178, 121), (178, 122), (178, 123), (178, 124), (178, 125), (178, 126), (178, 127), (178, 128), (178, 129), (178, 130), (178, 131), (178, 132), (178, 133), (178, 134), (178, 135), (178, 136), (178, 137), (178, 138), (178, 139), (178, 140), (178, 141), (178, 142), (178, 143), (178, 144), (178, 145), (178, 146), (178, 148), (179, 111), (179, 114), (179, 115), (179, 116), (179, 117), (179, 118), (179, 119), (179, 120), (179, 121), (179, 122), (179, 123), (179, 124), (179, 125), (179, 126), (179, 127), (179, 128), (179, 129), (179, 130), (179, 131), (179, 132), (179, 133), (179, 134), (179, 135), (179, 136), (179, 137), (179, 138), (179, 139), (179, 140), (179, 141), (179, 142), (179, 143), (179, 144), (179, 145), (179, 146), (179, 148), (180, 110), (180, 113), (180, 114),
(180, 115), (180, 116), (180, 117), (180, 118), (180, 119), (180, 120), (180, 121), (180, 122), (180, 123), (180, 124), (180, 125), (180, 126), (180, 127), (180, 128), (180, 129), (180, 130), (180, 131), (180, 132), (180, 133), (180, 134), (180, 135), (180, 136), (180, 137), (180, 138), (180, 139), (180, 140), (180, 141), (180, 142), (180, 143), (180, 144), (180, 145), (180, 146), (180, 147), (180, 149), (181, 109), (181, 112), (181, 113), (181, 114), (181, 115), (181, 116), (181, 117), (181, 118), (181, 119), (181, 120), (181, 121), (181, 122), (181, 123), (181, 124), (181, 125), (181, 126), (181, 127), (181, 128), (181, 129), (181, 130), (181, 131), (181, 132), (181, 133), (181, 134), (181, 135), (181, 136), (181, 137), (181, 138), (181, 139), (181, 140), (181, 141), (181, 142), (181, 143), (181, 144), (181, 145), (181, 146), (181, 147), (181, 149),
(182, 108), (182, 111), (182, 112), (182, 113), (182, 114), (182, 115), (182, 116), (182, 117), (182, 118), (182, 119), (182, 120), (182, 121), (182, 122), (182, 123), (182, 124), (182, 125), (182, 126), (182, 127), (182, 128), (182, 129), (182, 130), (182, 131), (182, 132), (182, 133), (182, 134), (182, 135), (182, 136), (182, 137), (182, 138), (182, 139), (182, 140), (182, 141), (182, 142), (182, 143), (182, 144), (182, 145), (182, 146), (182, 147), (182, 148), (182, 150), (183, 107), (183, 109), (183, 110), (183, 111), (183, 112), (183, 113), (183, 114), (183, 115), (183, 116), (183, 117), (183, 118), (183, 119), (183, 120), (183, 121), (183, 122), (183, 123), (183, 124), (183, 125), (183, 126), (183, 127), (183, 128), (183, 129), (183, 130), (183, 131), (183, 132), (183, 133), (183, 134), (183, 135), (183, 136), (183, 137), (183, 138), (183, 139),
(183, 140), (183, 141), (183, 142), (183, 143), (183, 144), (183, 145), (183, 146), (183, 147), (183, 148), (183, 149), (183, 151), (184, 98), (184, 99), (184, 106), (184, 108), (184, 109), (184, 110), (184, 111), (184, 112), (184, 113), (184, 114), (184, 115), (184, 116), (184, 117), (184, 118), (184, 119), (184, 120), (184, 121), (184, 122), (184, 123), (184, 124), (184, 125), (184, 126), (184, 127), (184, 128), (184, 129), (184, 130), (184, 131), (184, 132), (184, 133), (184, 134), (184, 135), (184, 136), (184, 137), (184, 138), (184, 139), (184, 140), (184, 141), (184, 142), (184, 143), (184, 144), (184, 145), (184, 146), (184, 147), (184, 148), (184, 149), (184, 150), (184, 152), (185, 98), (185, 100), (185, 104), (185, 107), (185, 108), (185, 109), (185, 110), (185, 111), (185, 112), (185, 113), (185, 114), (185, 115), (185, 116), (185, 117),
(185, 118), (185, 119), (185, 120), (185, 121), (185, 122), (185, 123), (185, 124), (185, 125), (185, 126), (185, 127), (185, 128), (185, 129), (185, 130), (185, 131), (185, 132), (185, 133), (185, 134), (185, 135), (185, 136), (185, 137), (185, 138), (185, 139), (185, 140), (185, 141), (185, 142), (185, 143), (185, 144), (185, 145), (185, 146), (185, 147), (185, 148), (185, 149), (185, 150), (185, 151), (185, 153), (186, 98), (186, 102), (186, 103), (186, 106), (186, 107), (186, 108), (186, 109), (186, 110), (186, 111), (186, 112), (186, 113), (186, 114), (186, 115), (186, 116), (186, 117), (186, 118), (186, 119), (186, 120), (186, 121), (186, 122), (186, 123), (186, 124), (186, 125), (186, 126), (186, 127), (186, 128), (186, 129), (186, 130), (186, 131), (186, 132), (186, 133), (186, 134), (186, 135), (186, 136), (186, 137), (186, 138), (186, 139),
(186, 140), (186, 141), (186, 142), (186, 143), (186, 144), (186, 145), (186, 146), (186, 147), (186, 148), (186, 149), (186, 150), (186, 151), (186, 154), (187, 98), (187, 100), (187, 104), (187, 105), (187, 106), (187, 107), (187, 108), (187, 109), (187, 110), (187, 111), (187, 112), (187, 113), (187, 114), (187, 115), (187, 116), (187, 117), (187, 118), (187, 119), (187, 120), (187, 121), (187, 122), (187, 123), (187, 124), (187, 125), (187, 126), (187, 127), (187, 128), (187, 129), (187, 130), (187, 131), (187, 132), (187, 133), (187, 134), (187, 135), (187, 136), (187, 137), (187, 138), (187, 139), (187, 140), (187, 141), (187, 142), (187, 143), (187, 144), (187, 145), (187, 146), (187, 147), (187, 148), (187, 149), (187, 150), (187, 151), (187, 152), (187, 154), (188, 98), (188, 100), (188, 101), (188, 102), (188, 103), (188, 104), (188, 105),
(188, 106), (188, 107), (188, 108), (188, 109), (188, 110), (188, 111), (188, 112), (188, 113), (188, 114), (188, 115), (188, 116), (188, 117), (188, 118), (188, 119), (188, 120), (188, 121), (188, 122), (188, 123), (188, 124), (188, 125), (188, 126), (188, 127), (188, 128), (188, 129), (188, 130), (188, 131), (188, 132), (188, 133), (188, 134), (188, 135), (188, 136), (188, 137), (188, 138), (188, 139), (188, 140), (188, 141), (188, 142), (188, 143), (188, 144), (188, 145), (188, 146), (188, 147), (188, 148), (188, 149), (188, 150), (188, 151), (188, 152), (188, 153), (188, 155), (189, 98), (189, 100), (189, 101), (189, 102), (189, 103), (189, 104), (189, 105), (189, 106), (189, 107), (189, 108), (189, 109), (189, 110), (189, 111), (189, 112), (189, 113), (189, 114), (189, 115), (189, 116), (189, 117), (189, 118), (189, 119), (189, 120), (189, 121),
(189, 122), (189, 123), (189, 124), (189, 125), (189, 126), (189, 127), (189, 128), (189, 129), (189, 130), (189, 131), (189, 132), (189, 133), (189, 134), (189, 135), (189, 136), (189, 137), (189, 138), (189, 139), (189, 140), (189, 141), (189, 142), (189, 143), (189, 144), (189, 145), (189, 146), (189, 147), (189, 148), (189, 149), (189, 150), (189, 151), (189, 152), (189, 153), (189, 155), (190, 98), (190, 100), (190, 101), (190, 102), (190, 103), (190, 104), (190, 105), (190, 106), (190, 107), (190, 108), (190, 109), (190, 110), (190, 111), (190, 112), (190, 113), (190, 114), (190, 115), (190, 116), (190, 117), (190, 118), (190, 119), (190, 120), (190, 121), (190, 122), (190, 123), (190, 124), (190, 125), (190, 126), (190, 127), (190, 128), (190, 129), (190, 130), (190, 131), (190, 132), (190, 133), (190, 134), (190, 135), (190, 136), (190, 137),
(190, 138), (190, 139), (190, 140), (190, 141), (190, 142), (190, 143), (190, 144), (190, 145), (190, 146), (190, 147), (190, 148), (190, 149), (190, 150), (190, 151), (190, 152), (190, 153), (190, 155), (191, 98), (191, 100), (191, 101), (191, 102), (191, 103), (191, 104), (191, 105), (191, 106), (191, 107), (191, 108), (191, 109), (191, 110), (191, 111), (191, 112), (191, 113), (191, 114), (191, 115), (191, 116), (191, 117), (191, 118), (191, 119), (191, 120), (191, 121), (191, 122), (191, 123), (191, 124), (191, 125), (191, 126), (191, 127), (191, 128), (191, 129), (191, 130), (191, 131), (191, 132), (191, 133), (191, 134), (191, 135), (191, 136), (191, 137), (191, 138), (191, 139), (191, 140), (191, 141), (191, 142), (191, 143), (191, 144), (191, 145), (191, 146), (191, 147), (191, 148), (191, 149), (191, 150), (191, 151), (191, 152), (191, 153),
(191, 155), (192, 98), (192, 100), (192, 101), (192, 102), (192, 103), (192, 104), (192, 105), (192, 106), (192, 107), (192, 108), (192, 109), (192, 110), (192, 111), (192, 112), (192, 113), (192, 114), (192, 115), (192, 116), (192, 117), (192, 118), (192, 119), (192, 120), (192, 121), (192, 122), (192, 123), (192, 124), (192, 125), (192, 126), (192, 127), (192, 128), (192, 129), (192, 130), (192, 131), (192, 132), (192, 133), (192, 134), (192, 135), (192, 136), (192, 137), (192, 138), (192, 139), (192, 140), (192, 141), (192, 142), (192, 143), (192, 144), (192, 145), (192, 146), (192, 147), (192, 148), (192, 149), (192, 150), (192, 151), (192, 152), (192, 153), (192, 155), (193, 98), (193, 100), (193, 101), (193, 102), (193, 103), (193, 104), (193, 105), (193, 106), (193, 107), (193, 108), (193, 109), (193, 110), (193, 111), (193, 112), (193, 113),
(193, 114), (193, 115), (193, 116), (193, 117), (193, 118), (193, 119), (193, 120), (193, 121), (193, 122), (193, 123), (193, 124), (193, 125), (193, 126), (193, 127), (193, 128), (193, 129), (193, 130), (193, 131), (193, 132), (193, 133), (193, 134), (193, 135), (193, 136), (193, 137), (193, 138), (193, 139), (193, 140), (193, 141), (193, 142), (193, 143), (193, 144), (193, 145), (193, 146), (193, 147), (193, 148), (193, 149), (193, 150), (193, 151), (193, 152), (193, 153), (193, 155), (194, 98), (194, 100), (194, 101), (194, 102), (194, 103), (194, 104), (194, 105), (194, 106), (194, 107), (194, 108), (194, 109), (194, 110), (194, 111), (194, 112), (194, 113), (194, 114), (194, 115), (194, 116), (194, 117), (194, 118), (194, 119), (194, 120), (194, 121), (194, 122), (194, 123), (194, 124), (194, 125), (194, 126), (194, 127), (194, 128), (194, 129),
(194, 130), (194, 131), (194, 132), (194, 133), (194, 134), (194, 135), (194, 136), (194, 137), (194, 138), (194, 139), (194, 140), (194, 141), (194, 142), (194, 143), (194, 144), (194, 145), (194, 146), (194, 147), (194, 148), (194, 149), (194, 150), (194, 151), (194, 152), (194, 153), (194, 155), (195, 98), (195, 100), (195, 101), (195, 102), (195, 103), (195, 104), (195, 105), (195, 106), (195, 107), (195, 108), (195, 109), (195, 110), (195, 111), (195, 112), (195, 113), (195, 114), (195, 115), (195, 116), (195, 117), (195, 118), (195, 119), (195, 120), (195, 121), (195, 122), (195, 123), (195, 124), (195, 125), (195, 126), (195, 127), (195, 128), (195, 129), (195, 130), (195, 131), (195, 132), (195, 133), (195, 134), (195, 135), (195, 136), (195, 137), (195, 138), (195, 139), (195, 140), (195, 141), (195, 142), (195, 143), (195, 144), (195, 145),
(195, 146), (195, 147), (195, 148), (195, 149), (195, 150), (195, 151), (195, 152), (195, 153), (195, 155), (196, 98), (196, 100), (196, 101), (196, 102), (196, 103), (196, 104), (196, 105), (196, 106), (196, 107), (196, 108), (196, 109), (196, 110), (196, 111), (196, 112), (196, 113), (196, 114), (196, 115), (196, 116), (196, 117), (196, 118), (196, 119), (196, 120), (196, 121), (196, 122), (196, 123), (196, 124), (196, 125), (196, 126), (196, 127), (196, 128), (196, 129), (196, 130), (196, 131), (196, 132), (196, 133), (196, 134), (196, 135), (196, 136), (196, 137), (196, 138), (196, 139), (196, 140), (196, 141), (196, 142), (196, 143), (196, 144), (196, 145), (196, 146), (196, 147), (196, 148), (196, 149), (196, 150), (196, 151), (196, 152), (196, 153), (196, 155), (197, 98), (197, 100), (197, 101), (197, 102), (197, 103), (197, 104), (197, 105),
(197, 106), (197, 107), (197, 108), (197, 109), (197, 110), (197, 111), (197, 112), (197, 113), (197, 114), (197, 115), (197, 116), (197, 117), (197, 118), (197, 119), (197, 120), (197, 121), (197, 122), (197, 123), (197, 124), (197, 125), (197, 126), (197, 127), (197, 128), (197, 129), (197, 130), (197, 131), (197, 132), (197, 133), (197, 134), (197, 135), (197, 136), (197, 137), (197, 138), (197, 139), (197, 140), (197, 141), (197, 142), (197, 143), (197, 144), (197, 145), (197, 146), (197, 147), (197, 148), (197, 149), (197, 150), (197, 151), (197, 152), (197, 153), (197, 155), (198, 99), (198, 101), (198, 102), (198, 103), (198, 104), (198, 105), (198, 106), (198, 107), (198, 108), (198, 109), (198, 110), (198, 111), (198, 112), (198, 113), (198, 114), (198, 115), (198, 116), (198, 117), (198, 118), (198, 119), (198, 120), (198, 121), (198, 122),
(198, 123), (198, 124), (198, 125), (198, 126), (198, 127), (198, 128), (198, 129), (198, 130), (198, 131), (198, 132), (198, 133), (198, 134), (198, 135), (198, 136), (198, 137), (198, 138), (198, 139), (198, 140), (198, 141), (198, 142), (198, 143), (198, 144), (198, 145), (198, 146), (198, 147), (198, 148), (198, 149), (198, 150), (198, 151), (198, 152), (198, 153), (198, 155), (199, 100), (199, 102), (199, 103), (199, 104), (199, 105), (199, 106), (199, 107), (199, 108), (199, 109), (199, 110), (199, 111), (199, 112), (199, 113), (199, 114), (199, 115), (199, 116), (199, 117), (199, 118), (199, 119), (199, 120), (199, 121), (199, 122), (199, 123), (199, 124), (199, 125), (199, 126), (199, 127), (199, 128), (199, 129), (199, 130), (199, 131), (199, 132), (199, 133), (199, 134), (199, 135), (199, 136), (199, 137), (199, 138), (199, 139), (199, 140),
(199, 141), (199, 142), (199, 143), (199, 144), (199, 145), (199, 146), (199, 147), (199, 148), (199, 149), (199, 150), (199, 151), (199, 152), (199, 153), (199, 155), (200, 100), (200, 102), (200, 103), (200, 104), (200, 105), (200, 106), (200, 107), (200, 108), (200, 109), (200, 110), (200, 111), (200, 112), (200, 113), (200, 114), (200, 115), (200, 116), (200, 117), (200, 118), (200, 119), (200, 120), (200, 121), (200, 122), (200, 123), (200, 124), (200, 125), (200, 126), (200, 127), (200, 128), (200, 129), (200, 130), (200, 131), (200, 132), (200, 133), (200, 134), (200, 135), (200, 136), (200, 137), (200, 138), (200, 139), (200, 140), (200, 141), (200, 142), (200, 143), (200, 144), (200, 145), (200, 146), (200, 147), (200, 148), (200, 149), (200, 150), (200, 151), (200, 152), (200, 153), (200, 155), (201, 101), (201, 103), (201, 104), (201, 105),
(201, 106), (201, 107), (201, 108), (201, 109), (201, 110), (201, 111), (201, 112), (201, 113), (201, 114), (201, 115), (201, 116), (201, 117), (201, 118), (201, 119), (201, 120), (201, 121), (201, 122), (201, 123), (201, 124), (201, 125), (201, 126), (201, 127), (201, 128), (201, 129), (201, 130), (201, 131), (201, 132), (201, 133), (201, 134), (201, 135), (201, 136), (201, 137), (201, 138), (201, 139), (201, 140), (201, 141), (201, 142), (201, 143), (201, 144), (201, 145), (201, 146), (201, 147), (201, 148), (201, 149), (201, 150), (201, 151), (201, 152), (201, 153), (201, 155), (202, 102), (202, 104), (202, 105), (202, 106), (202, 107), (202, 108), (202, 109), (202, 110), (202, 111), (202, 112), (202, 113), (202, 114), (202, 115), (202, 116), (202, 117), (202, 118), (202, 119), (202, 120), (202, 121), (202, 122), (202, 123), (202, 124), (202, 125),
(202, 126), (202, 127), (202, 128), (202, 129), (202, 130), (202, 131), (202, 132), (202, 133), (202, 134), (202, 135), (202, 136), (202, 137), (202, 138), (202, 139), (202, 140), (202, 141), (202, 142), (202, 143), (202, 144), (202, 145), (202, 146), (202, 147), (202, 148), (202, 149), (202, 150), (202, 151), (202, 152), (202, 153), (202, 155), (203, 102), (203, 104), (203, 105), (203, 106), (203, 107), (203, 108), (203, 109), (203, 110), (203, 111), (203, 112), (203, 113), (203, 114), (203, 115), (203, 116), (203, 117), (203, 118), (203, 119), (203, 120), (203, 121), (203, 122), (203, 123), (203, 124), (203, 125), (203, 126), (203, 127), (203, 128), (203, 129), (203, 130), (203, 131), (203, 132), (203, 133), (203, 134), (203, 135), (203, 136), (203, 137), (203, 138), (203, 139), (203, 140), (203, 141), (203, 142), (203, 143), (203, 144), (203, 145),
(203, 146), (203, 147), (203, 148), (203, 149), (203, 150), (203, 151), (203, 152), (203, 153), (203, 155), (204, 102), (204, 104), (204, 105), (204, 106), (204, 107), (204, 108), (204, 109), (204, 110), (204, 111), (204, 112), (204, 113), (204, 114), (204, 115), (204, 116), (204, 117), (204, 118), (204, 119), (204, 120), (204, 121), (204, 122), (204, 123), (204, 124), (204, 125), (204, 126), (204, 127), (204, 128), (204, 129), (204, 130), (204, 131), (204, 132), (204, 133), (204, 134), (204, 135), (204, 136), (204, 137), (204, 138), (204, 139), (204, 140), (204, 141), (204, 142), (204, 143), (204, 144), (204, 145), (204, 146), (204, 147), (204, 148), (204, 149), (204, 150), (204, 151), (204, 152), (204, 153), (204, 155), (205, 102), (205, 104), (205, 105), (205, 106), (205, 107), (205, 108), (205, 109), (205, 110), (205, 111), (205, 112), (205, 113),
(205, 114), (205, 115), (205, 116), (205, 117), (205, 118), (205, 119), (205, 120), (205, 121), (205, 122), (205, 123), (205, 124), (205, 125), (205, 126), (205, 127), (205, 128), (205, 129), (205, 130), (205, 131), (205, 132), (205, 133), (205, 134), (205, 135), (205, 136), (205, 137), (205, 138), (205, 139), (205, 140), (205, 141), (205, 142), (205, 143), (205, 144), (205, 145), (205, 146), (205, 147), (205, 148), (205, 149), (205, 150), (205, 151), (205, 152), (205, 153), (205, 155), (206, 102), (206, 104), (206, 105), (206, 106), (206, 107), (206, 108), (206, 109), (206, 110), (206, 111), (206, 112), (206, 113), (206, 114), (206, 115), (206, 116), (206, 117), (206, 118), (206, 119), (206, 120), (206, 121), (206, 122), (206, 123), (206, 124), (206, 125), (206, 126), (206, 127), (206, 128), (206, 129), (206, 130), (206, 131), (206, 132), (206, 133),
(206, 134), (206, 135), (206, 136), (206, 137), (206, 138), (206, 139), (206, 140), (206, 141), (206, 142), (206, 143), (206, 144), (206, 145), (206, 146), (206, 147), (206, 148), (206, 149), (206, 150), (206, 151), (206, 152), (206, 153), (206, 155), (207, 102), (207, 104), (207, 105), (207, 106), (207, 107), (207, 108), (207, 109), (207, 110), (207, 111), (207, 112), (207, 113), (207, 114), (207, 115), (207, 116), (207, 117), (207, 118), (207, 119), (207, 120), (207, 121), (207, 122), (207, 123), (207, 124), (207, 125), (207, 126), (207, 127), (207, 128), (207, 129), (207, 130), (207, 131), (207, 132), (207, 133), (207, 134), (207, 135), (207, 136), (207, 137), (207, 138), (207, 139), (207, 140), (207, 141), (207, 142), (207, 143), (207, 144), (207, 145), (207, 146), (207, 147), (207, 148), (207, 149), (207, 150), (207, 151), (207, 152), (207, 153),
(207, 155), (208, 103), (208, 105), (208, 106), (208, 107), (208, 108), (208, 109), (208, 110), (208, 111), (208, 112), (208, 113), (208, 114), (208, 115), (208, 116), (208, 117), (208, 118), (208, 119), (208, 120), (208, 121), (208, 122), (208, 123), (208, 124), (208, 125), (208, 126), (208, 127), (208, 128), (208, 129), (208, 130), (208, 131), (208, 132), (208, 133), (208, 134), (208, 135), (208, 136), (208, 137), (208, 138), (208, 139), (208, 140), (208, 141), (208, 142), (208, 143), (208, 144), (208, 145), (208, 146), (208, 147), (208, 148), (208, 149), (208, 150), (208, 151), (208, 152), (208, 153), (208, 155), (209, 103), (209, 105), (209, 106), (209, 107), (209, 108), (209, 109), (209, 110), (209, 111), (209, 112), (209, 113), (209, 114), (209, 115), (209, 116), (209, 117), (209, 118), (209, 119), (209, 120), (209, 121), (209, 122), (209, 123),
(209, 124), (209, 125), (209, 126), (209, 127), (209, 128), (209, 129), (209, 130), (209, 131), (209, 132), (209, 133), (209, 134), (209, 135), (209, 136), (209, 137), (209, 138), (209, 139), (209, 140), (209, 141), (209, 142), (209, 143), (209, 144), (209, 145), (209, 146), (209, 147), (209, 148), (209, 149), (209, 150), (209, 151), (209, 152), (209, 153), (209, 155), (210, 103), (210, 105), (210, 106), (210, 107), (210, 108), (210, 109), (210, 110), (210, 111), (210, 112), (210, 113), (210, 114), (210, 115), (210, 116), (210, 117), (210, 118), (210, 119), (210, 120), (210, 121), (210, 122), (210, 123), (210, 124), (210, 125), (210, 126), (210, 127), (210, 128), (210, 129), (210, 130), (210, 131), (210, 132), (210, 133), (210, 134), (210, 135), (210, 136), (210, 137), (210, 138), (210, 139), (210, 140), (210, 141), (210, 142), (210, 143), (210, 144),
(210, 145), (210, 146), (210, 147), (210, 148), (210, 149), (210, 150), (210, 151), (210, 152), (210, 153), (210, 155), (211, 103), (211, 105), (211, 106), (211, 107), (211, 108), (211, 109), (211, 110), (211, 111), (211, 112), (211, 113), (211, 114), (211, 115), (211, 116), (211, 117), (211, 118), (211, 119), (211, 120), (211, 121), (211, 122), (211, 123), (211, 124), (211, 125), (211, 126), (211, 127), (211, 128), (211, 129), (211, 130), (211, 131), (211, 132), (211, 133), (211, 134), (211, 135), (211, 136), (211, 137), (211, 138), (211, 139), (211, 140), (211, 141), (211, 142), (211, 143), (211, 144), (211, 145), (211, 146), (211, 147), (211, 148), (211, 149), (211, 150), (211, 151), (211, 152), (211, 153), (211, 155), (212, 103), (212, 105), (212, 106), (212, 107), (212, 108), (212, 109), (212, 110), (212, 111), (212, 112), (212, 113), (212, 114),
(212, 115), (212, 116), (212, 117), (212, 118), (212, 119), (212, 120), (212, 121), (212, 122), (212, 123), (212, 124), (212, 125), (212, 126), (212, 127), (212, 128), (212, 129), (212, 130), (212, 131), (212, 132), (212, 133), (212, 134), (212, 135), (212, 136), (212, 137), (212, 138), (212, 139), (212, 140), (212, 141), (212, 142), (212, 143), (212, 144), (212, 145), (212, 146), (212, 147), (212, 148), (212, 149), (212, 150), (212, 151), (212, 152), (212, 153), (212, 155), (213, 103), (213, 105), (213, 106), (213, 107), (213, 108), (213, 109), (213, 110), (213, 111), (213, 112), (213, 113), (213, 114), (213, 115), (213, 116), (213, 117), (213, 118), (213, 119), (213, 120), (213, 121), (213, 122), (213, 123), (213, 124), (213, 125), (213, 126), (213, 127), (213, 128), (213, 129), (213, 130), (213, 131), (213, 132), (213, 133), (213, 134), (213, 135),
(213, 136), (213, 137), (213, 138), (213, 139), (213, 140), (213, 141), (213, 142), (213, 143), (213, 144), (213, 145), (213, 146), (213, 147), (213, 148), (213, 149), (213, 150), (213, 151), (213, 152), (213, 153), (213, 155), (214, 103), (214, 105), (214, 106), (214, 107), (214, 108), (214, 109), (214, 110), (214, 111), (214, 112), (214, 113), (214, 114), (214, 115), (214, 116), (214, 117), (214, 118), (214, 119), (214, 120), (214, 121), (214, 122), (214, 123), (214, 124), (214, 125), (214, 126), (214, 127), (214, 128), (214, 129), (214, 130), (214, 131), (214, 132), (214, 133), (214, 134), (214, 135), (214, 136), (214, 137), (214, 138), (214, 139), (214, 140), (214, 141), (214, 142), (214, 143), (214, 144), (214, 145), (214, 146), (214, 147), (214, 148), (214, 149), (214, 150), (214, 151), (214, 152), (214, 153), (214, 155), (215, 103), (215, 104),
(215, 105), (215, 106), (215, 107), (215, 108), (215, 109), (215, 110), (215, 111), (215, 112), (215, 113), (215, 114), (215, 115), (215, 116), (215, 117), (215, 118), (215, 119), (215, 120), (215, 121), (215, 122), (215, 123), (215, 124), (215, 125), (215, 126), (215, 127), (215, 128), (215, 129), (215, 130), (215, 131), (215, 132), (215, 133), (215, 134), (215, 135), (215, 136), (215, 137), (215, 138), (215, 139), (215, 140), (215, 141), (215, 142), (215, 143), (215, 144), (215, 145), (215, 146), (215, 147), (215, 148), (215, 149), (215, 150), (215, 151), (215, 152), (215, 153), (215, 155), (216, 104), (216, 106), (216, 107), (216, 108), (216, 109), (216, 110), (216, 111), (216, 112), (216, 113), (216, 114), (216, 115), (216, 116), (216, 117), (216, 118), (216, 119), (216, 120), (216, 121), (216, 122), (216, 123), (216, 124), (216, 125), (216, 126),
(216, 127), (216, 128), (216, 129), (216, 130), (216, 131), (216, 132), (216, 133), (216, 134), (216, 135), (216, 136), (216, 137), (216, 138), (216, 139), (216, 140), (216, 141), (216, 142), (216, 143), (216, 144), (216, 145), (216, 146), (216, 147), (216, 148), (216, 149), (216, 150), (216, 151), (216, 152), (216, 153), (216, 155), (217, 104), (217, 106), (217, 107), (217, 108), (217, 109), (217, 110), (217, 111), (217, 112), (217, 113), (217, 114), (217, 115), (217, 116), (217, 117), (217, 118), (217, 119), (217, 120), (217, 121), (217, 122), (217, 123), (217, 124), (217, 125), (217, 126), (217, 127), (217, 128), (217, 129), (217, 130), (217, 131), (217, 132), (217, 133), (217, 134), (217, 135), (217, 136), (217, 137), (217, 138), (217, 139), (217, 140), (217, 141), (217, 142), (217, 143), (217, 144), (217, 145), (217, 146), (217, 147), (217, 148),
(217, 149), (217, 150), (217, 151), (217, 152), (217, 153), (217, 155), (218, 104), (218, 106), (218, 107), (218, 108), (218, 109), (218, 110), (218, 111), (218, 112), (218, 113), (218, 114), (218, 115), (218, 116), (218, 117), (218, 118), (218, 119), (218, 120), (218, 121), (218, 122), (218, 123), (218, 124), (218, 125), (218, 126), (218, 127), (218, 128), (218, 129), (218, 130), (218, 131), (218, 132), (218, 133), (218, 134), (218, 135), (218, 136), (218, 137), (218, 138), (218, 139), (218, 140), (218, 141), (218, 142), (218, 143), (218, 144), (218, 145), (218, 146), (218, 147), (218, 148), (218, 149), (218, 150), (218, 151), (218, 152), (218, 153), (218, 155), (219, 104), (219, 106), (219, 107), (219, 108), (219, 109), (219, 110), (219, 111), (219, 112), (219, 113), (219, 114), (219, 115), (219, 116), (219, 117), (219, 118), (219, 119), (219, 120),
(219, 121), (219, 122), (219, 123), (219, 124), (219, 125), (219, 126), (219, 127), (219, 128), (219, 129), (219, 130), (219, 131), (219, 132), (219, 133), (219, 134), (219, 135), (219, 136), (219, 137), (219, 138), (219, 139), (219, 140), (219, 141), (219, 142), (219, 143), (219, 144), (219, 145), (219, 146), (219, 147), (219, 148), (219, 149), (219, 150), (219, 151), (219, 152), (219, 153), (219, 155), (220, 104), (220, 106), (220, 107), (220, 108), (220, 109), (220, 110), (220, 111), (220, 112), (220, 113), (220, 114), (220, 115), (220, 116), (220, 117), (220, 118), (220, 119), (220, 120), (220, 121), (220, 122), (220, 123), (220, 124), (220, 125), (220, 126), (220, 127), (220, 128), (220, 129), (220, 130), (220, 131), (220, 132), (220, 133), (220, 134), (220, 135), (220, 136), (220, 137), (220, 138), (220, 139), (220, 140), (220, 141), (220, 142),
(220, 143), (220, 144), (220, 145), (220, 146), (220, 147), (220, 148), (220, 149), (220, 150), (220, 151), (220, 152), (220, 153), (220, 154), (220, 156), (221, 104), (221, 106), (221, 107), (221, 108), (221, 109), (221, 110), (221, 111), (221, 112), (221, 113), (221, 114), (221, 115), (221, 116), (221, 117), (221, 118), (221, 119), (221, 120), (221, 121), (221, 122), (221, 123), (221, 124), (221, 125), (221, 126), (221, 127), (221, 128), (221, 129), (221, 130), (221, 131), (221, 132), (221, 133), (221, 134), (221, 135), (221, 136), (221, 137), (221, 138), (221, 139), (221, 140), (221, 141), (221, 142), (221, 143), (221, 144), (221, 145), (221, 146), (221, 147), (221, 148), (221, 149), (221, 150), (221, 151), (221, 152), (221, 153), (221, 154), (221, 155), (221, 157), (222, 104), (222, 106), (222, 107), (222, 108), (222, 109), (222, 110), (222, 111),
(222, 112), (222, 113), (222, 114), (222, 115), (222, 116), (222, 117), (222, 118), (222, 119), (222, 120), (222, 121), (222, 122), (222, 123), (222, 124), (222, 125), (222, 126), (222, 127), (222, 128), (222, 129), (222, 130), (222, 131), (222, 132), (222, 133), (222, 134), (222, 135), (222, 136), (222, 137), (222, 138), (222, 139), (222, 140), (222, 141), (222, 142), (222, 143), (222, 144), (222, 145), (222, 146), (222, 147), (222, 148), (222, 149), (222, 150), (222, 151), (222, 152), (222, 153), (222, 154), (222, 155), (222, 156), (222, 158), (223, 104), (223, 106), (223, 107), (223, 108), (223, 109), (223, 110), (223, 111), (223, 112), (223, 113), (223, 114), (223, 115), (223, 116), (223, 117), (223, 118), (223, 119), (223, 120), (223, 121), (223, 122), (223, 123), (223, 124), (223, 125), (223, 126), (223, 127), (223, 128), (223, 129), (223, 130),
(223, 131), (223, 132), (223, 133), (223, 134), (223, 135), (223, 136), (223, 137), (223, 138), (223, 139), (223, 140), (223, 141), (223, 142), (223, 143), (223, 144), (223, 145), (223, 146), (223, 147), (223, 148), (223, 149), (223, 150), (223, 151), (223, 152), (223, 153), (223, 154), (223, 155), (223, 157), (224, 104), (224, 106), (224, 107), (224, 108), (224, 109), (224, 110), (224, 111), (224, 112), (224, 113), (224, 114), (224, 115), (224, 116), (224, 117), (224, 118), (224, 119), (224, 120), (224, 121), (224, 122), (224, 123), (224, 124), (224, 125), (224, 126), (224, 127), (224, 128), (224, 129), (224, 130), (224, 131), (224, 132), (224, 133), (224, 134), (224, 135), (224, 136), (224, 137), (224, 138), (224, 139), (224, 140), (224, 141), (224, 142), (224, 143), (224, 144), (224, 145), (224, 146), (224, 147), (224, 148), (224, 149), (224, 150),
(224, 151), (224, 152), (224, 153), (224, 154), (224, 156), (225, 104), (225, 106), (225, 107), (225, 108), (225, 109), (225, 110), (225, 111), (225, 112), (225, 113), (225, 114), (225, 115), (225, 116), (225, 117), (225, 118), (225, 119), (225, 120), (225, 121), (225, 122), (225, 123), (225, 124), (225, 125), (225, 126), (225, 127), (225, 128), (225, 129), (225, 130), (225, 131), (225, 132), (225, 133), (225, 134), (225, 135), (225, 136), (225, 137), (225, 138), (225, 139), (225, 140), (225, 141), (225, 142), (225, 143), (225, 144), (225, 145), (225, 146), (225, 147), (225, 148), (225, 149), (225, 150), (225, 151), (225, 152), (225, 153), (225, 155), (226, 105), (226, 107), (226, 108), (226, 109), (226, 110), (226, 111), (226, 112), (226, 113), (226, 114), (226, 115), (226, 116), (226, 117), (226, 118), (226, 119), (226, 120), (226, 121), (226, 122),
(226, 123), (226, 124), (226, 125), (226, 126), (226, 127), (226, 128), (226, 129), (226, 130), (226, 131), (226, 132), (226, 133), (226, 134), (226, 135), (226, 136), (226, 137), (226, 138), (226, 139), (226, 140), (226, 141), (226, 142), (226, 143), (226, 144), (226, 145), (226, 146), (226, 147), (226, 148), (226, 149), (226, 150), (226, 151), (226, 152), (226, 153), (226, 155), (227, 105), (227, 107), (227, 108), (227, 109), (227, 110), (227, 111), (227, 112), (227, 113), (227, 114), (227, 115), (227, 116), (227, 117), (227, 118), (227, 119), (227, 120), (227, 121), (227, 122), (227, 123), (227, 124), (227, 125), (227, 126), (227, 127), (227, 128), (227, 129), (227, 130), (227, 131), (227, 132), (227, 133), (227, 134), (227, 135), (227, 136), (227, 137), (227, 138), (227, 139), (227, 140), (227, 141), (227, 142), (227, 143), (227, 144), (227, 145),
(227, 146), (227, 147), (227, 148), (227, 149), (227, 150), (227, 151), (227, 152), (227, 153), (227, 155), (228, 105), (228, 107), (228, 108), (228, 109), (228, 110), (228, 111), (228, 112), (228, 113), (228, 114), (228, 115), (228, 116), (228, 117), (228, 118), (228, 119), (228, 120), (228, 121), (228, 122), (228, 123), (228, 124), (228, 125), (228, 126), (228, 127), (228, 128), (228, 129), (228, 130), (228, 131), (228, 132), (228, 133), (228, 134), (228, 135), (228, 136), (228, 137), (228, 138), (228, 139), (228, 140), (228, 141), (228, 142), (228, 143), (228, 144), (228, 145), (228, 146), (228, 147), (228, 148), (228, 149), (228, 150), (228, 151), (228, 152), (228, 153), (228, 155), (229, 105), (229, 107), (229, 108), (229, 109), (229, 110), (229, 111), (229, 112), (229, 113), (229, 114), (229, 115), (229, 116), (229, 117), (229, 118), (229, 119),
(229, 120), (229, 121), (229, 122), (229, 123), (229, 124), (229, 125), (229, 126), (229, 127), (229, 128), (229, 129), (229, 130), (229, 131), (229, 132), (229, 133), (229, 134), (229, 135), (229, 136), (229, 137), (229, 138), (229, 139), (229, 140), (229, 141), (229, 142), (229, 143), (229, 144), (229, 145), (229, 146), (229, 147), (229, 148), (229, 149), (229, 150), (229, 151), (229, 152), (229, 153), (229, 155), (230, 105), (230, 107), (230, 108), (230, 109), (230, 110), (230, 111), (230, 112), (230, 113), (230, 114), (230, 115), (230, 116), (230, 117), (230, 118), (230, 119), (230, 120), (230, 121), (230, 122), (230, 123), (230, 124), (230, 125), (230, 126), (230, 127), (230, 128), (230, 129), (230, 130), (230, 131), (230, 132), (230, 133), (230, 134), (230, 135), (230, 136), (230, 137), (230, 138), (230, 139), (230, 140), (230, 141), (230, 142),
(230, 143), (230, 144), (230, 145), (230, 146), (230, 147), (230, 148), (230, 149), (230, 150), (230, 151), (230, 152), (230, 153), (230, 155), (231, 104), (231, 106), (231, 107), (231, 108), (231, 109), (231, 110), (231, 111), (231, 112), (231, 113), (231, 114), (231, 115), (231, 116), (231, 117), (231, 118), (231, 119), (231, 120), (231, 121), (231, 122), (231, 123), (231, 124), (231, 125), (231, 126), (231, 127), (231, 128), (231, 129), (231, 130), (231, 131), (231, 132), (231, 133), (231, 134), (231, 135), (231, 136), (231, 137), (231, 138), (231, 139), (231, 140), (231, 141), (231, 142), (231, 143), (231, 144), (231, 145), (231, 146), (231, 147), (231, 148), (231, 149), (231, 150), (231, 151), (231, 152), (231, 153), (231, 155), (232, 104), (232, 106), (232, 107), (232, 108), (232, 109), (232, 110), (232, 111), (232, 112), (232, 113), (232, 114),
(232, 115), (232, 116), (232, 117), (232, 118), (232, 119), (232, 120), (232, 121), (232, 122), (232, 123), (232, 124), (232, 125), (232, 126), (232, 127), (232, 128), (232, 129), (232, 130), (232, 131), (232, 132), (232, 133), (232, 134), (232, 135), (232, 136), (232, 137), (232, 138), (232, 139), (232, 140), (232, 141), (232, 142), (232, 143), (232, 144), (232, 145), (232, 146), (232, 147), (232, 148), (232, 149), (232, 150), (232, 151), (232, 152), (232, 153), (232, 155), (233, 103), (233, 104), (233, 105), (233, 106), (233, 107), (233, 108), (233, 109), (233, 110), (233, 111), (233, 112), (233, 113), (233, 114), (233, 115), (233, 116), (233, 117), (233, 118), (233, 119), (233, 120), (233, 121), (233, 122), (233, 123), (233, 124), (233, 125), (233, 126), (233, 127), (233, 128), (233, 129), (233, 130), (233, 131), (233, 132), (233, 133), (233, 134),
(233, 135), (233, 136), (233, 137), (233, 138), (233, 139), (233, 140), (233, 141), (233, 142), (233, 143), (233, 144), (233, 145), (233, 146), (233, 147), (233, 148), (233, 149), (233, 150), (233, 151), (233, 152), (233, 153), (233, 155), (234, 103), (234, 105), (234, 106), (234, 107), (234, 108), (234, 109), (234, 110), (234, 111), (234, 112), (234, 113), (234, 114), (234, 115), (234, 116), (234, 117), (234, 118), (234, 119), (234, 120), (234, 121), (234, 122), (234, 123), (234, 124), (234, 125), (234, 126), (234, 127), (234, 128), (234, 129), (234, 130), (234, 131), (234, 132), (234, 133), (234, 134), (234, 135), (234, 136), (234, 137), (234, 138), (234, 139), (234, 140), (234, 141), (234, 142), (234, 143), (234, 144), (234, 145), (234, 146), (234, 147), (234, 148), (234, 149), (234, 150), (234, 151), (234, 152), (234, 153), (234, 155), (235, 102),
(235, 104), (235, 105), (235, 106), (235, 107), (235, 108), (235, 109), (235, 110), (235, 111), (235, 112), (235, 113), (235, 114), (235, 115), (235, 116), (235, 117), (235, 118), (235, 119), (235, 120), (235, 121), (235, 122), (235, 123), (235, 124), (235, 125), (235, 126), (235, 127), (235, 128), (235, 129), (235, 130), (235, 131), (235, 132), (235, 133), (235, 134), (235, 135), (235, 136), (235, 137), (235, 138), (235, 139), (235, 140), (235, 141), (235, 142), (235, 143), (235, 144), (235, 145), (235, 146), (235, 147), (235, 148), (235, 149), (235, 150), (235, 151), (235, 152), (235, 153), (235, 155), (236, 102), (236, 104), (236, 105), (236, 106), (236, 107), (236, 108), (236, 109), (236, 110), (236, 111), (236, 112), (236, 113), (236, 114), (236, 115), (236, 116), (236, 117), (236, 118), (236, 119), (236, 120), (236, 121), (236, 122), (236, 123),
(236, 124), (236, 125), (236, 126), (236, 127), (236, 128), (236, 129), (236, 130), (236, 131), (236, 132), (236, 133), (236, 134), (236, 135), (236, 136), (236, 137), (236, 138), (236, 139), (236, 140), (236, 141), (236, 142), (236, 143), (236, 144), (236, 145), (236, 146), (236, 147), (236, 148), (236, 149), (236, 150), (236, 151), (236, 152), (236, 153), (236, 155), (237, 101), (237, 103), (237, 104), (237, 105), (237, 106), (237, 107), (237, 108), (237, 109), (237, 110), (237, 111), (237, 112), (237, 113), (237, 114), (237, 115), (237, 116), (237, 117), (237, 118), (237, 119), (237, 120), (237, 121), (237, 122), (237, 123), (237, 124), (237, 125), (237, 126), (237, 127), (237, 128), (237, 129), (237, 130), (237, 131), (237, 132), (237, 133), (237, 134), (237, 135), (237, 136), (237, 137), (237, 138), (237, 139), (237, 140), (237, 141), (237, 142),
(237, 143), (237, 144), (237, 145), (237, 146), (237, 147), (237, 148), (237, 149), (237, 150), (237, 151), (237, 152), (237, 153), (237, 155), (238, 101), (238, 103), (238, 104), (238, 105), (238, 106), (238, 107), (238, 108), (238, 109), (238, 110), (238, 111), (238, 112), (238, 113), (238, 114), (238, 115), (238, 116), (238, 117), (238, 118), (238, 119), (238, 120), (238, 121), (238, 122), (238, 123), (238, 124), (238, 125), (238, 126), (238, 127), (238, 128), (238, 129), (238, 130), (238, 131), (238, 132), (238, 133), (238, 134), (238, 135), (238, 136), (238, 137), (238, 138), (238, 139), (238, 140), (238, 141), (238, 142), (238, 143), (238, 144), (238, 145), (238, 146), (238, 147), (238, 148), (238, 149), (238, 150), (238, 151), (238, 152), (238, 153), (238, 155), (239, 100), (239, 102), (239, 103), (239, 104), (239, 105), (239, 106), (239, 107),
(239, 108), (239, 109), (239, 110), (239, 111), (239, 112), (239, 113), (239, 114), (239, 115), (239, 116), (239, 117), (239, 118), (239, 119), (239, 120), (239, 121), (239, 122), (239, 123), (239, 124), (239, 125), (239, 126), (239, 127), (239, 128), (239, 129), (239, 130), (239, 131), (239, 132), (239, 133), (239, 134), (239, 135), (239, 136), (239, 137), (239, 138), (239, 139), (239, 140), (239, 141), (239, 142), (239, 143), (239, 144), (239, 145), (239, 146), (239, 147), (239, 148), (239, 149), (239, 150), (239, 151), (239, 152), (239, 153), (239, 155), (240, 100), (240, 102), (240, 103), (240, 104), (240, 105), (240, 106), (240, 107), (240, 108), (240, 109), (240, 110), (240, 111), (240, 112), (240, 113), (240, 114), (240, 115), (240, 116), (240, 117), (240, 118), (240, 119), (240, 120), (240, 121), (240, 122), (240, 123), (240, 124), (240, 125),
(240, 126), (240, 127), (240, 128), (240, 129), (240, 130), (240, 131), (240, 132), (240, 133), (240, 134), (240, 135), (240, 136), (240, 137), (240, 138), (240, 139), (240, 140), (240, 141), (240, 142), (240, 143), (240, 144), (240, 145), (240, 146), (240, 147), (240, 148), (240, 149), (240, 150), (240, 151), (240, 152), (240, 153), (240, 155), (241, 99), (241, 101), (241, 102), (241, 103), (241, 104), (241, 105), (241, 106), (241, 107), (241, 108), (241, 109), (241, 110), (241, 111), (241, 112), (241, 113), (241, 114), (241, 115), (241, 116), (241, 117), (241, 118), (241, 119), (241, 120), (241, 121), (241, 122), (241, 123), (241, 124), (241, 125), (241, 126), (241, 127), (241, 128), (241, 129), (241, 130), (241, 131), (241, 132), (241, 133), (241, 134), (241, 135), (241, 136), (241, 137), (241, 138), (241, 139), (241, 140), (241, 141), (241, 142),
(241, 143), (241, 144), (241, 145), (241, 146), (241, 147), (241, 148), (241, 149), (241, 150), (241, 151), (241, 152), (241, 153), (241, 155), (242, 99), (242, 101), (242, 102), (242, 103), (242, 104), (242, 105), (242, 106), (242, 107), (242, 108), (242, 109), (242, 110), (242, 111), (242, 112), (242, 113), (242, 114), (242, 115), (242, 116), (242, 117), (242, 118), (242, 119), (242, 120), (242, 121), (242, 122), (242, 123), (242, 124), (242, 125), (242, 126), (242, 127), (242, 128), (242, 129), (242, 130), (242, 131), (242, 132), (242, 133), (242, 134), (242, 135), (242, 136), (242, 137), (242, 138), (242, 139), (242, 140), (242, 141), (242, 142), (242, 143), (242, 144), (242, 145), (242, 146), (242, 147), (242, 148), (242, 149), (242, 150), (242, 151), (242, 152), (242, 153), (242, 155), (243, 98), (243, 100), (243, 101), (243, 102), (243, 103),
(243, 104), (243, 105), (243, 106), (243, 107), (243, 108), (243, 109), (243, 110), (243, 111), (243, 112), (243, 113), (243, 114), (243, 115), (243, 116), (243, 117), (243, 118), (243, 119), (243, 120), (243, 121), (243, 122), (243, 123), (243, 124), (243, 125), (243, 126), (243, 127), (243, 128), (243, 129), (243, 130), (243, 131), (243, 132), (243, 133), (243, 134), (243, 135), (243, 136), (243, 137), (243, 138), (243, 139), (243, 140), (243, 141), (243, 142), (243, 143), (243, 144), (243, 145), (243, 146), (243, 147), (243, 148), (243, 149), (243, 150), (243, 151), (243, 152), (243, 153), (243, 155), (244, 98), (244, 100), (244, 101), (244, 102), (244, 103), (244, 104), (244, 105), (244, 106), (244, 107), (244, 108), (244, 109), (244, 110), (244, 111), (244, 112), (244, 113), (244, 114), (244, 115), (244, 116), (244, 117), (244, 118), (244, 119),
(244, 120), (244, 121), (244, 122), (244, 123), (244, 124), (244, 125), (244, 126), (244, 127), (244, 128), (244, 129), (244, 130), (244, 131), (244, 132), (244, 133), (244, 134), (244, 135), (244, 136), (244, 137), (244, 138), (244, 139), (244, 140), (244, 141), (244, 142), (244, 143), (244, 144), (244, 145), (244, 146), (244, 147), (244, 148), (244, 149), (244, 150), (244, 151), (244, 152), (244, 153), (244, 155), (245, 98), (245, 100), (245, 101), (245, 102), (245, 103), (245, 104), (245, 105), (245, 106), (245, 107), (245, 108), (245, 109), (245, 110), (245, 111), (245, 112), (245, 113), (245, 114), (245, 115), (245, 116), (245, 117), (245, 118), (245, 119), (245, 120), (245, 121), (245, 122), (245, 123), (245, 124), (245, 125), (245, 126), (245, 127), (245, 128), (245, 129), (245, 130), (245, 131), (245, 132), (245, 133), (245, 134), (245, 135),
(245, 136), (245, 137), (245, 138), (245, 139), (245, 140), (245, 141), (245, 142), (245, 143), (245, 144), (245, 145), (245, 146), (245, 147), (245, 148), (245, 149), (245, 150), (245, 151), (245, 152), (245, 153), (245, 155), (246, 97), (246, 99), (246, 100), (246, 101), (246, 102), (246, 103), (246, 104), (246, 105), (246, 106), (246, 107), (246, 108), (246, 109), (246, 110), (246, 111), (246, 112), (246, 113), (246, 114), (246, 115), (246, 116), (246, 117), (246, 118), (246, 119), (246, 120), (246, 121), (246, 122), (246, 123), (246, 124), (246, 125), (246, 126), (246, 127), (246, 128), (246, 129), (246, 130), (246, 131), (246, 132), (246, 133), (246, 134), (246, 135), (246, 136), (246, 137), (246, 138), (246, 139), (246, 140), (246, 141), (246, 142), (246, 143), (246, 144), (246, 145), (246, 146), (246, 147), (246, 148), (246, 149), (246, 150),
(246, 151), (246, 152), (246, 153), (246, 155), (247, 97), (247, 99), (247, 100), (247, 101), (247, 102), (247, 103), (247, 104), (247, 105), (247, 106), (247, 107), (247, 108), (247, 109), (247, 110), (247, 111), (247, 112), (247, 113), (247, 114), (247, 115), (247, 116), (247, 117), (247, 118), (247, 119), (247, 120), (247, 121), (247, 122), (247, 123), (247, 124), (247, 125), (247, 126), (247, 127), (247, 128), (247, 129), (247, 130), (247, 131), (247, 132), (247, 133), (247, 134), (247, 135), (247, 136), (247, 137), (247, 138), (247, 139), (247, 140), (247, 141), (247, 142), (247, 143), (247, 144), (247, 145), (247, 146), (247, 147), (247, 148), (247, 149), (247, 150), (247, 151), (247, 152), (247, 153), (247, 155), (248, 97), (248, 99), (248, 100), (248, 101), (248, 102), (248, 103), (248, 104), (248, 105), (248, 106), (248, 107), (248, 108),
(248, 109), (248, 110), (248, 111), (248, 112), (248, 113), (248, 114), (248, 115), (248, 116), (248, 117), (248, 118), (248, 119), (248, 120), (248, 121), (248, 122), (248, 123), (248, 124), (248, 125), (248, 126), (248, 127), (248, 128), (248, 129), (248, 130), (248, 131), (248, 132), (248, 133), (248, 134), (248, 135), (248, 136), (248, 137), (248, 138), (248, 139), (248, 140), (248, 141), (248, 142), (248, 143), (248, 144), (248, 145), (248, 146), (248, 147), (248, 148), (248, 149), (248, 150), (248, 151), (248, 152), (248, 153), (248, 155), (249, 96), (249, 98), (249, 99), (249, 100), (249, 101), (249, 102), (249, 103), (249, 104), (249, 105), (249, 106), (249, 107), (249, 108), (249, 109), (249, 110), (249, 111), (249, 112), (249, 113), (249, 114), (249, 115), (249, 116), (249, 117), (249, 118), (249, 119), (249, 120), (249, 121), (249, 122),
(249, 123), (249, 124), (249, 125), (249, 126), (249, 127), (249, 128), (249, 129), (249, 130), (249, 131), (249, 132), (249, 133), (249, 134), (249, 135), (249, 136), (249, 137), (249, 138), (249, 139), (249, 140), (249, 141), (249, 142), (249, 143), (249, 144), (249, 145), (249, 146), (249, 147), (249, 148), (249, 149), (249, 150), (249, 151), (249, 152), (249, 153), (249, 155), (250, 96), (250, 98), (250, 99), (250, 100), (250, 101), (250, 102), (250, 103), (250, 104), (250, 105), (250, 106), (250, 107), (250, 108), (250, 109), (250, 110), (250, 111), (250, 112), (250, 113), (250, 114), (250, 115), (250, 116), (250, 117), (250, 118), (250, 119), (250, 120), (250, 121), (250, 122), (250, 123), (250, 124), (250, 125), (250, 126), (250, 127), (250, 128), (250, 129), (250, 130), (250, 131), (250, 132), (250, 133), (250, 134), (250, 135), (250, 136),
(250, 137), (250, 138), (250, 139), (250, 140), (250, 141), (250, 142), (250, 143), (250, 144), (250, 145), (250, 146), (250, 147), (250, 148), (250, 149), (250, 150), (250, 151), (250, 152), (250, 153), (250, 155), (251, 96), (251, 98), (251, 99), (251, 100), (251, 101), (251, 102), (251, 103), (251, 104), (251, 105), (251, 106), (251, 107), (251, 108), (251, 109), (251, 110), (251, 111), (251, 112), (251, 113), (251, 114), (251, 115), (251, 116), (251, 117), (251, 118), (251, 119), (251, 120), (251, 121), (251, 122), (251, 123), (251, 124), (251, 125), (251, 126), (251, 127), (251, 128), (251, 129), (251, 130), (251, 131), (251, 132), (251, 133), (251, 134), (251, 135), (251, 136), (251, 137), (251, 138), (251, 139), (251, 140), (251, 141), (251, 142), (251, 143), (251, 144), (251, 145), (251, 146), (251, 147), (251, 148), (251, 149), (251, 150),
(251, 151), (251, 152), (251, 153), (251, 155), (252, 96), (252, 98), (252, 99), (252, 100), (252, 101), (252, 102), (252, 103), (252, 104), (252, 105), (252, 106), (252, 107), (252, 108), (252, 109), (252, 110), (252, 111), (252, 112), (252, 113), (252, 114), (252, 115), (252, 116), (252, 117), (252, 118), (252, 119), (252, 120), (252, 121), (252, 122), (252, 123), (252, 124), (252, 125), (252, 126), (252, 127), (252, 128), (252, 129), (252, 130), (252, 131), (252, 132), (252, 133), (252, 134), (252, 135), (252, 136), (252, 137), (252, 138), (252, 139), (252, 140), (252, 141), (252, 142), (252, 143), (252, 144), (252, 145), (252, 146), (252, 147), (252, 148), (252, 149), (252, 150), (252, 151), (252, 152), (252, 153), (252, 155), (253, 96), (253, 98), (253, 99), (253, 100), (253, 101), (253, 102), (253, 103), (253, 104), (253, 105), (253, 106),
(253, 107), (253, 108), (253, 109), (253, 110), (253, 111), (253, 112), (253, 113), (253, 114), (253, 115), (253, 116), (253, 117), (253, 118), (253, 119), (253, 120), (253, 121), (253, 122), (253, 123), (253, 124), (253, 125), (253, 126), (253, 127), (253, 128), (253, 129), (253, 130), (253, 131), (253, 132), (253, 133), (253, 134), (253, 135), (253, 136), (253, 137), (253, 138), (253, 139), (253, 140), (253, 141), (253, 142), (253, 143), (253, 144), (253, 145), (253, 146), (253, 147), (253, 148), (253, 149), (253, 150), (253, 151), (253, 152), (253, 153), (253, 155), (254, 96), (254, 98), (254, 99), (254, 100), (254, 101), (254, 102), (254, 103), (254, 104), (254, 105), (254, 106), (254, 107), (254, 108), (254, 109), (254, 110), (254, 111), (254, 112), (254, 113), (254, 114), (254, 115), (254, 116), (254, 117), (254, 118), (254, 119), (254, 120),
(254, 121), (254, 122), (254, 123), (254, 124), (254, 125), (254, 126), (254, 127), (254, 128), (254, 129), (254, 130), (254, 131), (254, 132), (254, 133), (254, 134), (254, 135), (254, 136), (254, 137), (254, 138), (254, 139), (254, 140), (254, 141), (254, 142), (254, 143), (254, 144), (254, 145), (254, 146), (254, 147), (254, 148), (254, 149), (254, 150), (254, 151), (254, 152), (254, 153), (254, 155), (255, 95), (255, 97), (255, 98), (255, 99), (255, 100), (255, 101), (255, 102), (255, 103), (255, 104), (255, 105), (255, 106), (255, 107), (255, 108), (255, 109), (255, 110), (255, 111), (255, 112), (255, 113), (255, 114), (255, 115), (255, 116), (255, 117), (255, 118), (255, 119), (255, 120), (255, 121), (255, 122), (255, 123), (255, 124), (255, 125), (255, 126), (255, 127), (255, 128), (255, 129), (255, 130), (255, 131), (255, 132), (255, 133),
(255, 134), (255, 135), (255, 136), (255, 137), (255, 138), (255, 139), (255, 140), (255, 141), (255, 142), (255, 143), (255, 144), (255, 145), (255, 146), (255, 147), (255, 148), (255, 149), (255, 150), (255, 151), (255, 152), (255, 153), (255, 155), (256, 95), (256, 97), (256, 98), (256, 99), (256, 100), (256, 101), (256, 102), (256, 103), (256, 104), (256, 105), (256, 106), (256, 107), (256, 108), (256, 109), (256, 110), (256, 111), (256, 112), (256, 113), (256, 114), (256, 115), (256, 116), (256, 117), (256, 118), (256, 119), (256, 120), (256, 121), (256, 122), (256, 123), (256, 124), (256, 125), (256, 126), (256, 127), (256, 128), (256, 129), (256, 130), (256, 131), (256, 132), (256, 133), (256, 134), (256, 135), (256, 136), (256, 137), (256, 138), (256, 139), (256, 140), (256, 141), (256, 142), (256, 143), (256, 144), (256, 145), (256, 146),
(256, 147), (256, 148), (256, 149), (256, 150), (256, 151), (256, 152), (256, 153), (256, 155), (257, 95), (257, 97), (257, 98), (257, 99), (257, 100), (257, 101), (257, 102), (257, 103), (257, 104), (257, 105), (257, 106), (257, 107), (257, 108), (257, 109), (257, 110), (257, 111), (257, 112), (257, 113), (257, 114), (257, 115), (257, 116), (257, 117), (257, 118), (257, 119), (257, 120), (257, 121), (257, 122), (257, 123), (257, 124), (257, 125), (257, 126), (257, 127), (257, 128), (257, 129), (257, 130), (257, 131), (257, 132), (257, 133), (257, 134), (257, 135), (257, 136), (257, 137), (257, 138), (257, 139), (257, 140), (257, 141), (257, 142), (257, 143), (257, 144), (257, 145), (257, 146), (257, 147), (257, 148), (257, 149), (257, 150), (257, 151), (257, 152), (257, 153), (257, 155), (258, 95), (258, 97), (258, 98), (258, 99), (258, 100),
(258, 101), (258, 102), (258, 103), (258, 104), (258, 105), (258, 106), (258, 107), (258, 108), (258, 109), (258, 110), (258, 111), (258, 112), (258, 113), (258, 114), (258, 115), (258, 116), (258, 117), (258, 118), (258, 119), (258, 120), (258, 121), (258, 122), (258, 123), (258, 124), (258, 125), (258, 126), (258, 127), (258, 128), (258, 129), (258, 130), (258, 131), (258, 132), (258, 133), (258, 134), (258, 135), (258, 136), (258, 137), (258, 138), (258, 139), (258, 140), (258, 141), (258, 142), (258, 143), (258, 144), (258, 145), (258, 146), (258, 147), (258, 148), (258, 149), (258, 150), (258, 151), (258, 152), (258, 153), (258, 155), (259, 95), (259, 97), (259, 98), (259, 99), (259, 100), (259, 101), (259, 102), (259, 103), (259, 104), (259, 105), (259, 106), (259, 107), (259, 108), (259, 109), (259, 110), (259, 111), (259, 112), (259, 113),
(259, 114), (259, 115), (259, 116), (259, 117), (259, 118), (259, 119), (259, 120), (259, 121), (259, 122), (259, 123), (259, 124), (259, 125), (259, 126), (259, 127), (259, 128), (259, 129), (259, 130), (259, 131), (259, 132), (259, 133), (259, 134), (259, 135), (259, 136), (259, 137), (259, 138), (259, 139), (259, 140), (259, 141), (259, 142), (259, 143), (259, 144), (259, 145), (259, 146), (259, 147), (259, 148), (259, 149), (259, 150), (259, 151), (259, 152), (259, 153), (259, 155), (260, 95), (260, 109), (260, 110), (260, 111), (260, 112), (260, 113), (260, 114), (260, 115), (260, 116), (260, 117), (260, 118), (260, 119), (260, 120), (260, 121), (260, 122), (260, 123), (260, 124), (260, 125), (260, 126), (260, 127), (260, 128), (260, 129), (260, 130), (260, 131), (260, 132), (260, 133), (260, 134), (260, 135), (260, 136), (260, 137), (260, 138),
(260, 139), (260, 140), (260, 141), (260, 142), (260, 143), (260, 144), (260, 145), (260, 146), (260, 147), (260, 148), (260, 149), (260, 150), (260, 151), (260, 152), (260, 153), (260, 155), (261, 95), (261, 97), (261, 98), (261, 99), (261, 100), (261, 101), (261, 102), (261, 103), (261, 104), (261, 105), (261, 106), (261, 107), (261, 110), (261, 111), (261, 112), (261, 113), (261, 114), (261, 115), (261, 116), (261, 117), (261, 118), (261, 119), (261, 120), (261, 121), (261, 122), (261, 123), (261, 124), (261, 125), (261, 126), (261, 127), (261, 128), (261, 129), (261, 130), (261, 131), (261, 132), (261, 133), (261, 134), (261, 135), (261, 136), (261, 137), (261, 138), (261, 139), (261, 140), (261, 141), (261, 142), (261, 143), (261, 144), (261, 145), (261, 146), (261, 147), (261, 148), (261, 149), (261, 150), (261, 151), (261, 152), (261, 154),
(262, 95), (262, 109), (262, 111), (262, 112), (262, 113), (262, 114), (262, 115), (262, 116), (262, 117), (262, 118), (262, 119), (262, 120), (262, 121), (262, 122), (262, 123), (262, 124), (262, 125), (262, 126), (262, 127), (262, 128), (262, 129), (262, 130), (262, 131), (262, 132), (262, 133), (262, 134), (262, 135), (262, 136), (262, 137), (262, 138), (262, 139), (262, 140), (262, 141), (262, 142), (262, 143), (262, 144), (262, 145), (262, 146), (262, 147), (262, 148), (262, 149), (262, 150), (262, 151), (262, 152), (262, 154), (263, 110), (263, 112), (263, 113), (263, 114), (263, 115), (263, 116), (263, 117), (263, 118), (263, 119), (263, 120), (263, 121), (263, 122), (263, 123), (263, 124), (263, 125), (263, 126), (263, 127), (263, 128), (263, 129), (263, 130), (263, 131), (263, 132), (263, 133), (263, 134), (263, 135), (263, 136), (263, 137),
(263, 138), (263, 139), (263, 140), (263, 141), (263, 142), (263, 143), (263, 144), (263, 145), (263, 146), (263, 147), (263, 148), (263, 149), (263, 150), (263, 151), (263, 153), (264, 111), (264, 113), (264, 114), (264, 115), (264, 116), (264, 117), (264, 118), (264, 119), (264, 120), (264, 121), (264, 122), (264, 123), (264, 124), (264, 125), (264, 126), (264, 127), (264, 128), (264, 129), (264, 130), (264, 131), (264, 132), (264, 133), (264, 134), (264, 135), (264, 136), (264, 137), (264, 138), (264, 139), (264, 140), (264, 141), (264, 142), (264, 143), (264, 144), (264, 145), (264, 146), (264, 147), (264, 148), (264, 149), (264, 150), (264, 152), (265, 112), (265, 114), (265, 115), (265, 116), (265, 117), (265, 118), (265, 119), (265, 120), (265, 121), (265, 122), (265, 123), (265, 124), (265, 125), (265, 126), (265, 127), (265, 128), (265, 129),
(265, 130), (265, 131), (265, 132), (265, 133), (265, 134), (265, 135), (265, 136), (265, 137), (265, 138), (265, 139), (265, 140), (265, 141), (265, 142), (265, 143), (265, 144), (265, 145), (265, 146), (265, 147), (265, 148), (265, 149), (265, 150), (265, 152), (266, 112), (266, 114), (266, 115), (266, 116), (266, 117), (266, 118), (266, 119), (266, 120), (266, 121), (266, 122), (266, 123), (266, 124), (266, 125), (266, 126), (266, 127), (266, 128), (266, 129), (266, 130), (266, 131), (266, 132), (266, 133), (266, 134), (266, 135), (266, 136), (266, 137), (266, 138), (266, 139), (266, 140), (266, 141), (266, 142), (266, 143), (266, 144), (266, 145), (266, 146), (266, 147), (266, 148), (266, 149), (266, 151), (267, 113), (267, 115), (267, 116), (267, 117), (267, 118), (267, 119), (267, 120), (267, 121), (267, 122), (267, 123), (267, 124), (267, 125),
(267, 126), (267, 127), (267, 128), (267, 129), (267, 130), (267, 131), (267, 132), (267, 133), (267, 134), (267, 135), (267, 136), (267, 137), (267, 138), (267, 139), (267, 140), (267, 141), (267, 142), (267, 143), (267, 144), (267, 145), (267, 146), (267, 147), (267, 148), (267, 150), (268, 114), (268, 116), (268, 117), (268, 118), (268, 119), (268, 120), (268, 121), (268, 122), (268, 123), (268, 124), (268, 125), (268, 126), (268, 127), (268, 128), (268, 129), (268, 130), (268, 131), (268, 132), (268, 133), (268, 134), (268, 135), (268, 136), (268, 137), (268, 138), (268, 139), (268, 140), (268, 141), (268, 142), (268, 143), (268, 144), (268, 145), (268, 146), (268, 147), (268, 148), (268, 150), (269, 114), (269, 116), (269, 117), (269, 118), (269, 119), (269, 120), (269, 121), (269, 122), (269, 123), (269, 124), (269, 125), (269, 126), (269, 127),
(269, 128), (269, 129), (269, 130), (269, 131), (269, 132), (269, 133), (269, 134), (269, 135), (269, 136), (269, 137), (269, 138), (269, 139), (269, 140), (269, 141), (269, 142), (269, 143), (269, 144), (269, 145), (269, 146), (269, 147), (269, 149), (270, 115), (270, 117), (270, 118), (270, 119), (270, 120), (270, 121), (270, 122), (270, 123), (270, 124), (270, 125), (270, 126), (270, 127), (270, 128), (270, 129), (270, 130), (270, 131), (270, 132), (270, 133), (270, 134), (270, 135), (270, 136), (270, 137), (270, 138), (270, 139), (270, 140), (270, 141), (270, 142), (270, 143), (270, 144), (270, 145), (270, 146), (270, 147), (270, 149), (271, 115), (271, 117), (271, 118), (271, 119), (271, 120), (271, 121), (271, 122), (271, 123), (271, 124), (271, 125), (271, 126), (271, 127), (271, 128), (271, 129), (271, 130), (271, 131), (271, 132), (271, 133),
(271, 134), (271, 135), (271, 136), (271, 137), (271, 138), (271, 139), (271, 140), (271, 141), (271, 142), (271, 143), (271, 144), (271, 145), (271, 146), (271, 148), (272, 116), (272, 118), (272, 119), (272, 120), (272, 121), (272, 122), (272, 123), (272, 124), (272, 125), (272, 126), (272, 127), (272, 128), (272, 129), (272, 130), (272, 131), (272, 132), (272, 133), (272, 134), (272, 135), (272, 136), (272, 137), (272, 138), (272, 139), (272, 140), (272, 141), (272, 142), (272, 143), (272, 144), (272, 145), (272, 147), (273, 116), (273, 118), (273, 119), (273, 120), (273, 121), (273, 122), (273, 123), (273, 124), (273, 125), (273, 126), (273, 127), (273, 128), (273, 129), (273, 130), (273, 131), (273, 132), (273, 133), (273, 134), (273, 135), (273, 136), (273, 137), (273, 138), (273, 139), (273, 140), (273, 141), (273, 142), (273, 143), (273, 144),
(273, 145), (273, 147), (274, 116), (274, 118), (274, 119), (274, 120), (274, 121), (274, 122), (274, 123), (274, 124), (274, 125), (274, 126), (274, 127), (274, 128), (274, 129), (274, 130), (274, 131), (274, 132), (274, 133), (274, 134), (274, 135), (274, 136), (274, 137), (274, 138), (274, 139), (274, 140), (274, 141), (274, 142), (274, 143), (274, 144), (274, 146), (275, 116), (275, 118), (275, 119), (275, 120), (275, 121), (275, 122), (275, 123), (275, 124), (275, 125), (275, 126), (275, 127), (275, 128), (275, 129), (275, 130), (275, 131), (275, 132), (275, 133), (275, 134), (275, 135), (275, 136), (275, 137), (275, 138), (275, 139), (275, 140), (275, 141), (275, 142), (275, 143), (275, 145), (276, 107), (276, 108), (276, 116), (276, 118), (276, 119), (276, 120), (276, 121), (276, 122), (276, 123), (276, 124), (276, 125), (276, 126), (276, 127),
(276, 128), (276, 129), (276, 130), (276, 131), (276, 132), (276, 133), (276, 134), (276, 135), (276, 136), (276, 137), (276, 138), (276, 139), (276, 140), (276, 141), (276, 142), (276, 144), (277, 106), (277, 109), (277, 111), (277, 116), (277, 118), (277, 119), (277, 120), (277, 121), (277, 122), (277, 123), (277, 124), (277, 125), (277, 126), (277, 127), (277, 128), (277, 129), (277, 130), (277, 131), (277, 132), (277, 133), (277, 134), (277, 135), (277, 136), (277, 137), (277, 138), (277, 139), (277, 140), (277, 141), (277, 143), (278, 105), (278, 107), (278, 108), (278, 112), (278, 113), (278, 115), (278, 116), (278, 117), (278, 118), (278, 119), (278, 120), (278, 121), (278, 122), (278, 123), (278, 124), (278, 125), (278, 126), (278, 127), (278, 128), (278, 129), (278, 130), (278, 131), (278, 132), (278, 133), (278, 134), (278, 135), (278, 136),
(278, 137), (278, 138), (278, 139), (278, 140), (278, 141), (278, 143), (279, 105), (279, 108), (279, 109), (279, 110), (279, 111), (279, 116), (279, 117), (279, 118), (279, 119), (279, 120), (279, 121), (279, 122), (279, 123), (279, 124), (279, 125), (279, 126), (279, 127), (279, 128), (279, 129), (279, 130), (279, 131), (279, 132), (279, 133), (279, 134), (279, 135), (279, 136), (279, 137), (279, 138), (279, 139), (279, 140), (280, 106), (280, 109), (280, 110), (280, 111), (280, 112), (280, 113), (280, 114), (280, 115), (280, 116), (280, 117), (280, 118), (280, 119), (280, 120), (280, 121), (280, 122), (280, 123), (280, 124), (280, 125), (280, 126), (280, 127), (280, 128), (280, 129), (280, 130), (280, 131), (280, 132), (280, 133), (280, 134), (280, 135), (280, 136), (280, 137), (280, 138), (280, 139), (280, 142), (281, 108), (281, 111), (281, 112),
(281, 113), (281, 114), (281, 115), (281, 116), (281, 141), (282, 109), (282, 112), (282, 113), (282, 114), (282, 115), (282, 118), (282, 119), (282, 120), (282, 121), (282, 122), (282, 123), (282, 124), (282, 125), (282, 126), (282, 127), (282, 128), (282, 129), (282, 130), (282, 131), (282, 132), (282, 133), (282, 134), (282, 135), (282, 136), (282, 137), (282, 139), (283, 111), (283, 114), (283, 116), (284, 112), (284, 115), (285, 114), )
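# Note (assumption): the tuple below appears to list (row, column) pixel positions
# to be rendered in the color #FFCC00, mirroring the coordinate tuples for the
# other colors defined above; it is presumably consumed by a drawing loop elsewhere
# in this file.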
coordinates_FFCC00 = ((168, 71),
(169, 68), (169, 71), (170, 66), (170, 71), (171, 64), (171, 68), (171, 69), (171, 71), (172, 63), (172, 66), (172, 67), (172, 68), (172, 69), (172, 70), (172, 71), (173, 61), (173, 64), (173, 65), (173, 66), (173, 67), (173, 68), (173, 70), (174, 35), (174, 36), (174, 37), (174, 38), (174, 39), (174, 40), (174, 41), (174, 42), (174, 43), (174, 44), (174, 45), (174, 46), (174, 55), (174, 56), (174, 57), (174, 58), (174, 59), (174, 60), (174, 63), (174, 64), (174, 65), (174, 66), (174, 67), (174, 68), (174, 70), (175, 32), (175, 34), (175, 46), (175, 47), (175, 48), (175, 49), (175, 50), (175, 51), (175, 52), (175, 53), (175, 54), (175, 61), (175, 62), (175, 63), (175, 64), (175, 65), (175, 66), (175, 67), (175, 68), (175, 70), (176, 30), (176, 35), (176, 36), (176, 37), (176, 38),
(176, 39), (176, 40), (176, 41), (176, 42), (176, 43), (176, 44), (176, 45), (176, 46), (176, 55), (176, 56), (176, 57), (176, 58), (176, 59), (176, 60), (176, 61), (176, 62), (176, 63), (176, 64), (176, 65), (176, 66), (176, 67), (176, 69), (177, 29), (177, 32), (177, 33), (177, 34), (177, 35), (177, 36), (177, 37), (177, 38), (177, 39), (177, 40), (177, 41), (177, 42), (177, 43), (177, 44), (177, 45), (177, 46), (177, 47), (177, 48), (177, 49), (177, 50), (177, 51), (177, 52), (177, 53), (177, 54), (177, 55), (177, 56), (177, 57), (177, 58), (177, 59), (177, 60), (177, 61), (177, 62), (177, 63), (177, 64), (177, 65), (177, 66), (177, 67), (177, 69), (178, 27), (178, 30), (178, 31), (178, 32), (178, 33), (178, 34), (178, 35), (178, 36), (178, 37), (178, 38), (178, 39), (178, 40),
(178, 41), (178, 42), (178, 43), (178, 44), (178, 45), (178, 46), (178, 47), (178, 48), (178, 49), (178, 50), (178, 51), (178, 52), (178, 53), (178, 54), (178, 55), (178, 56), (178, 57), (178, 58), (178, 59), (178, 60), (178, 61), (178, 62), (178, 63), (178, 64), (178, 65), (178, 66), (178, 68), (179, 26), (179, 29), (179, 30), (179, 31), (179, 32), (179, 33), (179, 34), (179, 35), (179, 36), (179, 37), (179, 38), (179, 39), (179, 40), (179, 41), (179, 42), (179, 43), (179, 44), (179, 45), (179, 46), (179, 47), (179, 48), (179, 49), (179, 50), (179, 51), (179, 52), (179, 53), (179, 54), (179, 55), (179, 56), (179, 57), (179, 58), (179, 59), (179, 60), (179, 61), (179, 62), (179, 63), (179, 64), (179, 65), (179, 67), (180, 25), (180, 27), (180, 28), (180, 29), (180, 30), (180, 31),
(180, 32), (180, 33), (180, 34), (180, 35), (180, 36), (180, 37), (180, 38), (180, 39), (180, 40), (180, 41), (180, 42), (180, 43), (180, 44), (180, 45), (180, 46), (180, 47), (180, 48), (180, 49), (180, 50), (180, 51), (180, 52), (180, 53), (180, 54), (180, 55), (180, 56), (180, 57), (180, 58), (180, 59), (180, 60), (180, 61), (180, 62), (180, 63), (180, 64), (180, 66), (181, 24), (181, 26), (181, 27), (181, 28), (181, 29), (181, 30), (181, 31), (181, 32), (181, 33), (181, 34), (181, 35), (181, 36), (181, 37), (181, 38), (181, 39), (181, 40), (181, 41), (181, 42), (181, 43), (181, 44), (181, 45), (181, 46), (181, 47), (181, 48), (181, 49), (181, 50), (181, 51), (181, 52), (181, 53), (181, 54), (181, 55), (181, 56), (181, 57), (181, 58), (181, 59), (181, 60), (181, 61), (181, 62),
(181, 65), (182, 23), (182, 25), (182, 26), (182, 27), (182, 28), (182, 29), (182, 30), (182, 31), (182, 32), (182, 33), (182, 34), (182, 35), (182, 36), (182, 37), (182, 38), (182, 39), (182, 40), (182, 41), (182, 42), (182, 43), (182, 44), (182, 45), (182, 46), (182, 47), (182, 48), (182, 49), (182, 50), (182, 51), (182, 52), (182, 53), (182, 54), (182, 55), (182, 56), (182, 57), (182, 58), (182, 59), (182, 60), (182, 64), (183, 22), (183, 24), (183, 25), (183, 26), (183, 27), (183, 28), (183, 29), (183, 30), (183, 31), (183, 32), (183, 33), (183, 34), (183, 35), (183, 36), (183, 37), (183, 38), (183, 39), (183, 40), (183, 41), (183, 42), (183, 43), (183, 44), (183, 45), (183, 46), (183, 47), (183, 48), (183, 49), (183, 50), (183, 51), (183, 52), (183, 53), (183, 54), (183, 55),
(183, 56), (183, 57), (183, 62), (184, 21), (184, 23), (184, 24), (184, 25), (184, 26), (184, 27), (184, 28), (184, 29), (184, 30), (184, 31), (184, 32), (184, 33), (184, 34), (184, 35), (184, 36), (184, 37), (184, 38), (184, 39), (184, 40), (184, 41), (184, 42), (184, 43), (184, 44), (184, 45), (184, 46), (184, 47), (184, 48), (184, 49), (184, 50), (184, 51), (184, 52), (184, 53), (184, 54), (184, 58), (184, 60), (185, 20), (185, 22), (185, 23), (185, 24), (185, 25), (185, 26), (185, 27), (185, 28), (185, 29), (185, 30), (185, 31), (185, 32), (185, 33), (185, 34), (185, 35), (185, 36), (185, 37), (185, 38), (185, 39), (185, 40), (185, 41), (185, 42), (185, 43), (185, 44), (185, 45), (185, 46), (185, 47), (185, 48), (185, 49), (185, 50), (185, 51), (185, 55), (185, 57), (186, 19),
(186, 21), (186, 22), (186, 23), (186, 24), (186, 25), (186, 26), (186, 27), (186, 28), (186, 29), (186, 30), (186, 31), (186, 32), (186, 33), (186, 34), (186, 35), (186, 36), (186, 37), (186, 38), (186, 39), (186, 40), (186, 41), (186, 42), (186, 43), (186, 44), (186, 45), (186, 46), (186, 47), (186, 48), (186, 52), (186, 54), (187, 18), (187, 20), (187, 21), (187, 22), (187, 23), (187, 24), (187, 25), (187, 26), (187, 27), (187, 28), (187, 29), (187, 30), (187, 31), (187, 32), (187, 33), (187, 34), (187, 35), (187, 36), (187, 37), (187, 38), (187, 39), (187, 40), (187, 41), (187, 42), (187, 43), (187, 44), (187, 45), (187, 49), (187, 50), (187, 51), (188, 20), (188, 21), (188, 22), (188, 23), (188, 24), (188, 25), (188, 26), (188, 27), (188, 28), (188, 29), (188, 30), (188, 31),
(188, 32), (188, 33), (188, 34), (188, 35), (188, 36), (188, 37), (188, 38), (188, 39), (188, 40), (188, 41), (188, 42), (188, 43), (188, 46), (188, 47), (188, 48), (189, 17), (189, 19), (189, 20), (189, 21), (189, 22), (189, 23), (189, 24), (189, 25), (189, 26), (189, 27), (189, 28), (189, 29), (189, 30), (189, 31), (189, 32), (189, 33), (189, 34), (189, 35), (189, 44), (189, 45), (190, 16), (190, 18), (190, 19), (190, 20), (190, 21), (190, 22), (190, 23), (190, 24), (190, 25), (190, 26), (190, 27), (190, 28), (190, 29), (190, 30), (190, 31), (190, 32), (190, 33), (190, 36), (190, 37), (190, 38), (190, 39), (190, 40), (190, 41), (190, 42), (191, 15), (191, 17), (191, 18), (191, 19), (191, 20), (191, 21), (191, 22), (191, 23), (191, 24), (191, 25), (191, 26), (191, 27), (191, 28),
(191, 29), (191, 30), (191, 31), (191, 32), (192, 15), (192, 17), (192, 18), (192, 19), (192, 20), (192, 21), (192, 22), (192, 23), (192, 24), (192, 25), (192, 26), (192, 27), (192, 28), (192, 29), (192, 30), (192, 31), (192, 33), (193, 14), (193, 16), (193, 17), (193, 18), (193, 19), (193, 20), (193, 21), (193, 22), (193, 23), (193, 24), (193, 25), (193, 26), (193, 27), (193, 28), (193, 29), (193, 30), (193, 32), (194, 13), (194, 15), (194, 16), (194, 17), (194, 18), (194, 19), (194, 20), (194, 21), (194, 22), (194, 23), (194, 24), (194, 25), (194, 26), (194, 27), (194, 28), (194, 29), (194, 31), (195, 13), (195, 15), (195, 16), (195, 17), (195, 18), (195, 19), (195, 20), (195, 21), (195, 22), (195, 23), (195, 24), (195, 25), (195, 26), (195, 27), (195, 28), (195, 29), (195, 31),
(195, 55), (196, 12), (196, 14), (196, 15), (196, 16), (196, 17), (196, 18), (196, 19), (196, 20), (196, 21), (196, 22), (196, 23), (196, 24), (196, 25), (196, 26), (196, 27), (196, 28), (196, 30), (196, 51), (196, 52), (196, 54), (197, 12), (197, 14), (197, 15), (197, 16), (197, 17), (197, 18), (197, 19), (197, 20), (197, 21), (197, 22), (197, 23), (197, 24), (197, 25), (197, 26), (197, 27), (197, 29), (197, 48), (197, 54), (198, 11), (198, 13), (198, 14), (198, 15), (198, 16), (198, 17), (198, 18), (198, 19), (198, 20), (198, 21), (198, 22), (198, 23), (198, 24), (198, 25), (198, 26), (198, 27), (198, 29), (198, 46), (198, 50), (198, 53), (199, 11), (199, 13), (199, 14), (199, 15), (199, 16), (199, 17), (199, 18), (199, 19), (199, 20), (199, 21), (199, 22), (199, 23), (199, 24),
(199, 25), (199, 26), (199, 28), (199, 44), (199, 48), (199, 49), (199, 53), (200, 10), (200, 12), (200, 13), (200, 14), (200, 15), (200, 16), (200, 17), (200, 18), (200, 19), (200, 20), (200, 21), (200, 22), (200, 23), (200, 24), (200, 25), (200, 26), (200, 28), (200, 42), (200, 46), (200, 47), (200, 48), (200, 50), (201, 10), (201, 12), (201, 13), (201, 14), (201, 15), (201, 16), (201, 17), (201, 18), (201, 19), (201, 20), (201, 21), (201, 22), (201, 23), (201, 24), (201, 25), (201, 26), (201, 28), (201, 40), (201, 44), (201, 45), (201, 46), (201, 47), (201, 49), (202, 10), (202, 12), (202, 13), (202, 14), (202, 15), (202, 16), (202, 17), (202, 18), (202, 19), (202, 20), (202, 21), (202, 22), (202, 23), (202, 24), (202, 25), (202, 26), (202, 27), (202, 29), (202, 38), (202, 42),
(202, 43), (202, 44), (202, 45), (202, 46), (202, 48), (203, 10), (203, 12), (203, 13), (203, 14), (203, 15), (203, 16), (203, 17), (203, 18), (203, 19), (203, 20), (203, 21), (203, 22), (203, 23), (203, 24), (203, 25), (203, 26), (203, 27), (203, 28), (203, 30), (203, 36), (203, 40), (203, 41), (203, 42), (203, 43), (203, 44), (203, 45), (203, 47), (204, 10), (204, 12), (204, 13), (204, 14), (204, 15), (204, 16), (204, 17), (204, 18), (204, 19), (204, 20), (204, 21), (204, 22), (204, 23), (204, 24), (204, 25), (204, 26), (204, 27), (204, 28), (204, 29), (204, 31), (204, 32), (204, 33), (204, 34), (204, 38), (204, 39), (204, 40), (204, 41), (204, 42), (204, 43), (204, 44), (204, 46), (205, 10), (205, 12), (205, 13), (205, 14), (205, 15), (205, 16), (205, 17), (205, 18), (205, 19),
(205, 20), (205, 21), (205, 22), (205, 23), (205, 24), (205, 25), (205, 26), (205, 27), (205, 28), (205, 29), (205, 30), (205, 35), (205, 36), (205, 37), (205, 38), (205, 39), (205, 40), (205, 41), (205, 42), (205, 43), (205, 45), (206, 10), (206, 12), (206, 13), (206, 14), (206, 15), (206, 16), (206, 17), (206, 18), (206, 19), (206, 20), (206, 21), (206, 22), (206, 23), (206, 24), (206, 25), (206, 26), (206, 27), (206, 28), (206, 29), (206, 30), (206, 31), (206, 32), (206, 33), (206, 34), (206, 35), (206, 36), (206, 37), (206, 38), (206, 39), (206, 40), (206, 41), (206, 42), (206, 44), (207, 10), (207, 12), (207, 13), (207, 14), (207, 15), (207, 16), (207, 17), (207, 18), (207, 19), (207, 20), (207, 21), (207, 22), (207, 23), (207, 24), (207, 25), (207, 26), (207, 27), (207, 28),
(207, 29), (207, 30), (207, 31), (207, 32), (207, 33), (207, 34), (207, 35), (207, 36), (207, 37), (207, 38), (207, 39), (207, 40), (207, 41), (207, 42), (207, 44), (208, 10), (208, 12), (208, 13), (208, 14), (208, 15), (208, 16), (208, 17), (208, 18), (208, 19), (208, 20), (208, 21), (208, 22), (208, 23), (208, 24), (208, 25), (208, 26), (208, 27), (208, 28), (208, 29), (208, 30), (208, 31), (208, 32), (208, 33), (208, 34), (208, 35), (208, 36), (208, 37), (208, 38), (208, 39), (208, 40), (208, 41), (208, 43), (209, 10), (209, 12), (209, 13), (209, 14), (209, 15), (209, 16), (209, 17), (209, 18), (209, 19), (209, 20), (209, 21), (209, 22), (209, 23), (209, 24), (209, 25), (209, 26), (209, 27), (209, 28), (209, 29), (209, 30), (209, 31), (209, 32), (209, 33), (209, 34), (209, 35),
(209, 36), (209, 37), (209, 38), (209, 39), (209, 40), (209, 41), (209, 43), (210, 11), (210, 13), (210, 14), (210, 15), (210, 16), (210, 17), (210, 18), (210, 19), (210, 20), (210, 21), (210, 22), (210, 23), (210, 24), (210, 25), (210, 26), (210, 27), (210, 28), (210, 29), (210, 30), (210, 31), (210, 32), (210, 33), (210, 34), (210, 35), (210, 36), (210, 37), (210, 38), (210, 39), (210, 40), (210, 42), (211, 11), (211, 13), (211, 14), (211, 15), (211, 16), (211, 17), (211, 18), (211, 19), (211, 20), (211, 21), (211, 22), (211, 23), (211, 24), (211, 25), (211, 26), (211, 27), (211, 28), (211, 29), (211, 30), (211, 31), (211, 32), (211, 33), (211, 34), (211, 35), (211, 36), (211, 37), (211, 38), (211, 39), (211, 40), (211, 42), (212, 11), (212, 13), (212, 14), (212, 15), (212, 16),
(212, 17), (212, 18), (212, 19), (212, 20), (212, 21), (212, 22), (212, 23), (212, 24), (212, 25), (212, 26), (212, 27), (212, 28), (212, 29), (212, 30), (212, 31), (212, 32), (212, 33), (212, 34), (212, 35), (212, 36), (212, 37), (212, 38), (212, 39), (212, 41), (213, 12), (213, 14), (213, 15), (213, 16), (213, 17), (213, 18), (213, 19), (213, 20), (213, 21), (213, 22), (213, 23), (213, 24), (213, 25), (213, 26), (213, 27), (213, 28), (213, 29), (213, 30), (213, 31), (213, 32), (213, 33), (213, 34), (213, 35), (213, 36), (213, 37), (213, 38), (213, 39), (213, 41), (214, 12), (214, 14), (214, 15), (214, 16), (214, 17), (214, 18), (214, 19), (214, 20), (214, 21), (214, 22), (214, 23), (214, 24), (214, 25), (214, 26), (214, 27), (214, 28), (214, 29), (214, 30), (214, 31), (214, 32),
(214, 33), (214, 34), (214, 35), (214, 36), (214, 37), (214, 38), (214, 39), (214, 41), (215, 13), (215, 15), (215, 16), (215, 17), (215, 18), (215, 19), (215, 20), (215, 21), (215, 22), (215, 23), (215, 24), (215, 25), (215, 26), (215, 27), (215, 28), (215, 29), (215, 30), (215, 31), (215, 32), (215, 33), (215, 34), (215, 35), (215, 36), (215, 37), (215, 38), (215, 40), (216, 13), (216, 15), (216, 16), (216, 17), (216, 18), (216, 19), (216, 20), (216, 21), (216, 22), (216, 23), (216, 24), (216, 25), (216, 26), (216, 27), (216, 28), (216, 29), (216, 30), (216, 31), (216, 32), (216, 33), (216, 34), (216, 35), (216, 36), (216, 37), (216, 38), (216, 40), (217, 14), (217, 16), (217, 17), (217, 18), (217, 19), (217, 20), (217, 21), (217, 22), (217, 23), (217, 24), (217, 25), (217, 26),
(217, 27), (217, 28), (217, 29), (217, 30), (217, 31), (217, 32), (217, 33), (217, 34), (217, 35), (217, 36), (217, 37), (217, 38), (217, 40), (218, 15), (218, 18), (218, 19), (218, 20), (218, 21), (218, 22), (218, 23), (218, 24), (218, 25), (218, 26), (218, 27), (218, 28), (218, 29), (218, 30), (218, 31), (218, 32), (218, 33), (218, 34), (218, 35), (218, 36), (218, 37), (218, 39), (219, 16), (219, 38), (220, 18), (220, 19), (220, 20), (220, 21), (220, 22), (220, 23), (220, 24), (220, 25), (220, 26), (220, 27), (220, 28), (220, 29), (220, 30), (220, 31), (220, 32), (220, 33), (220, 34), (220, 35), (220, 37), (225, 20), (225, 22), (225, 23), (225, 24), (225, 25), (225, 26), (225, 27), (225, 28), (225, 29), (225, 30), (225, 31), (225, 32), (225, 33), (225, 34), (225, 35), (225, 36),
(225, 38), (226, 19), (226, 39), (227, 18), (227, 20), (227, 21), (227, 22), (227, 23), (227, 24), (227, 25), (227, 26), (227, 27), (227, 28), (227, 29), (227, 30), (227, 31), (227, 32), (227, 33), (227, 34), (227, 35), (227, 36), (227, 37), (227, 38), (227, 40), (228, 16), (228, 19), (228, 20), (228, 21), (228, 22), (228, 23), (228, 24), (228, 25), (228, 26), (228, 27), (228, 28), (228, 29), (228, 30), (228, 31), (228, 32), (228, 33), (228, 34), (228, 35), (228, 36), (228, 37), (228, 38), (228, 40), (229, 14), (229, 18), (229, 19), (229, 20), (229, 21), (229, 22), (229, 23), (229, 24), (229, 25), (229, 26), (229, 27), (229, 28), (229, 29), (229, 30), (229, 31), (229, 32), (229, 33), (229, 34), (229, 35), (229, 36), (229, 37), (229, 38), (229, 40), (230, 12), (230, 16), (230, 17),
(230, 18), (230, 19), (230, 20), (230, 21), (230, 22), (230, 23), (230, 24), (230, 25), (230, 26), (230, 27), (230, 28), (230, 29), (230, 30), (230, 31), (230, 32), (230, 33), (230, 34), (230, 35), (230, 36), (230, 37), (230, 38), (230, 40), (231, 11), (231, 14), (231, 15), (231, 16), (231, 17), (231, 18), (231, 19), (231, 20), (231, 21), (231, 22), (231, 23), (231, 24), (231, 25), (231, 26), (231, 27), (231, 28), (231, 29), (231, 30), (231, 31), (231, 32), (231, 33), (231, 34), (231, 35), (231, 36), (231, 37), (231, 38), (231, 39), (231, 41), (232, 11), (232, 13), (232, 14), (232, 15), (232, 16), (232, 17), (232, 18), (232, 19), (232, 20), (232, 21), (232, 22), (232, 23), (232, 24), (232, 25), (232, 26), (232, 27), (232, 28), (232, 29), (232, 30), (232, 31), (232, 32), (232, 33),
(232, 34), (232, 35), (232, 36), (232, 37), (232, 38), (232, 39), (232, 41), (233, 10), (233, 12), (233, 13), (233, 14), (233, 15), (233, 16), (233, 17), (233, 18), (233, 19), (233, 20), (233, 21), (233, 22), (233, 23), (233, 24), (233, 25), (233, 26), (233, 27), (233, 28), (233, 29), (233, 30), (233, 31), (233, 32), (233, 33), (233, 34), (233, 35), (233, 36), (233, 37), (233, 38), (233, 39), (233, 41), (234, 10), (234, 12), (234, 13), (234, 14), (234, 15), (234, 16), (234, 17), (234, 18), (234, 19), (234, 20), (234, 21), (234, 22), (234, 23), (234, 24), (234, 25), (234, 26), (234, 27), (234, 28), (234, 29), (234, 30), (234, 31), (234, 32), (234, 33), (234, 34), (234, 35), (234, 36), (234, 37), (234, 38), (234, 39), (234, 40), (234, 42), (235, 10), (235, 12), (235, 13), (235, 14),
(235, 15), (235, 16), (235, 17), (235, 18), (235, 19), (235, 20), (235, 21), (235, 22), (235, 23), (235, 24), (235, 25), (235, 26), (235, 27), (235, 28), (235, 29), (235, 30), (235, 31), (235, 32), (235, 33), (235, 34), (235, 35), (235, 36), (235, 37), (235, 38), (235, 39), (235, 40), (235, 42), (236, 10), (236, 12), (236, 13), (236, 14), (236, 15), (236, 16), (236, 17), (236, 18), (236, 19), (236, 20), (236, 21), (236, 22), (236, 23), (236, 24), (236, 25), (236, 26), (236, 27), (236, 28), (236, 29), (236, 30), (236, 31), (236, 32), (236, 33), (236, 34), (236, 35), (236, 36), (236, 37), (236, 38), (236, 39), (236, 40), (236, 41), (236, 43), (237, 10), (237, 12), (237, 13), (237, 14), (237, 15), (237, 16), (237, 17), (237, 18), (237, 19), (237, 20), (237, 21), (237, 22), (237, 23),
(237, 24), (237, 25), (237, 26), (237, 27), (237, 28), (237, 29), (237, 30), (237, 31), (237, 32), (237, 33), (237, 34), (237, 35), (237, 36), (237, 37), (237, 38), (237, 39), (237, 40), (237, 41), (237, 42), (237, 44), (238, 10), (238, 12), (238, 13), (238, 14), (238, 15), (238, 16), (238, 17), (238, 18), (238, 19), (238, 20), (238, 21), (238, 22), (238, 23), (238, 24), (238, 25), (238, 26), (238, 27), (238, 28), (238, 29), (238, 30), (238, 31), (238, 32), (238, 33), (238, 34), (238, 35), (238, 36), (238, 37), (238, 38), (238, 39), (238, 40), (238, 41), (238, 42), (238, 44), (239, 10), (239, 12), (239, 13), (239, 14), (239, 15), (239, 16), (239, 17), (239, 18), (239, 19), (239, 20), (239, 21), (239, 22), (239, 23), (239, 24), (239, 25), (239, 26), (239, 27), (239, 28), (239, 29),
(239, 30), (239, 31), (239, 32), (239, 33), (239, 34), (239, 35), (239, 36), (239, 37), (239, 38), (239, 39), (239, 40), (239, 41), (239, 42), (239, 43), (239, 45), (240, 10), (240, 12), (240, 13), (240, 14), (240, 15), (240, 16), (240, 17), (240, 18), (240, 19), (240, 20), (240, 21), (240, 22), (240, 23), (240, 24), (240, 25), (240, 26), (240, 27), (240, 28), (240, 29), (240, 30), (240, 31), (240, 32), (240, 33), (240, 34), (240, 35), (240, 36), (240, 37), (240, 38), (240, 39), (240, 40), (240, 41), (240, 42), (240, 43), (240, 44), (240, 46), (241, 10), (241, 12), (241, 13), (241, 14), (241, 15), (241, 16), (241, 17), (241, 18), (241, 19), (241, 20), (241, 21), (241, 22), (241, 23), (241, 24), (241, 25), (241, 26), (241, 27), (241, 28), (241, 29), (241, 30), (241, 31), (241, 32),
(241, 33), (241, 34), (241, 35), (241, 36), (241, 37), (241, 38), (241, 39), (241, 40), (241, 41), (241, 42), (241, 43), (241, 44), (241, 45), (241, 47), (242, 10), (242, 12), (242, 13), (242, 14), (242, 15), (242, 16), (242, 17), (242, 18), (242, 19), (242, 20), (242, 21), (242, 22), (242, 23), (242, 24), (242, 25), (242, 26), (242, 27), (242, 28), (242, 29), (242, 30), (242, 31), (242, 32), (242, 33), (242, 34), (242, 35), (242, 36), (242, 37), (242, 38), (242, 39), (242, 40), (242, 41), (242, 42), (242, 43), (242, 44), (242, 45), (242, 46), (242, 48), (243, 11), (243, 13), (243, 14), (243, 15), (243, 16), (243, 17), (243, 18), (243, 19), (243, 20), (243, 21), (243, 22), (243, 23), (243, 24), (243, 25), (243, 26), (243, 27), (243, 28), (243, 29), (243, 30), (243, 31), (243, 32),
(243, 33), (243, 34), (243, 40), (243, 41), (243, 42), (243, 43), (243, 44), (243, 45), (243, 46), (243, 47), (243, 50), (244, 11), (244, 13), (244, 14), (244, 15), (244, 16), (244, 17), (244, 18), (244, 19), (244, 20), (244, 21), (244, 22), (244, 23), (244, 24), (244, 25), (244, 26), (244, 27), (244, 28), (244, 29), (244, 30), (244, 31), (244, 32), (244, 33), (244, 36), (244, 37), (244, 38), (244, 39), (244, 43), (244, 44), (244, 45), (244, 46), (244, 47), (244, 48), (244, 51), (245, 11), (245, 13), (245, 14), (245, 15), (245, 16), (245, 17), (245, 18), (245, 19), (245, 20), (245, 21), (245, 22), (245, 23), (245, 24), (245, 25), (245, 26), (245, 27), (245, 28), (245, 29), (245, 30), (245, 31), (245, 32), (245, 34), (245, 40), (245, 41), (245, 42), (245, 46), (245, 47), (245, 48),
(245, 49), (245, 50), (245, 53), (245, 54), (246, 12), (246, 14), (246, 15), (246, 16), (246, 17), (246, 18), (246, 19), (246, 20), (246, 21), (246, 22), (246, 23), (246, 24), (246, 25), (246, 26), (246, 27), (246, 28), (246, 29), (246, 30), (246, 31), (246, 33), (246, 43), (246, 45), (246, 48), (246, 49), (246, 50), (246, 51), (246, 55), (246, 56), (246, 58), (247, 12), (247, 14), (247, 15), (247, 16), (247, 17), (247, 18), (247, 19), (247, 20), (247, 21), (247, 22), (247, 23), (247, 24), (247, 25), (247, 26), (247, 27), (247, 28), (247, 29), (247, 30), (247, 32), (247, 46), (247, 47), (247, 51), (247, 52), (247, 53), (247, 54), (248, 13), (248, 15), (248, 16), (248, 17), (248, 18), (248, 19), (248, 20), (248, 21), (248, 22), (248, 23), (248, 24), (248, 25), (248, 26), (248, 27),
(248, 28), (248, 29), (248, 31), (248, 48), (248, 50), (248, 59), (249, 13), (249, 15), (249, 16), (249, 17), (249, 18), (249, 19), (249, 20), (249, 21), (249, 22), (249, 23), (249, 24), (249, 25), (249, 26), (249, 27), (249, 28), (249, 29), (249, 31), (249, 51), (249, 53), (249, 54), (249, 55), (249, 56), (249, 57), (249, 59), (250, 14), (250, 16), (250, 17), (250, 18), (250, 19), (250, 20), (250, 21), (250, 22), (250, 23), (250, 24), (250, 25), (250, 26), (250, 27), (250, 28), (250, 29), (250, 31), (250, 59), (251, 14), (251, 16), (251, 17), (251, 18), (251, 19), (251, 20), (251, 21), (251, 22), (251, 23), (251, 24), (251, 25), (251, 26), (251, 27), (251, 28), (251, 29), (251, 30), (251, 32), (252, 15), (252, 17), (252, 18), (252, 19), (252, 20), (252, 21), (252, 22), (252, 23),
(252, 24), (252, 25), (252, 26), (252, 27), (252, 28), (252, 29), (252, 30), (252, 31), (252, 34), (253, 15), (253, 17), (253, 18), (253, 19), (253, 20), (253, 21), (253, 22), (253, 23), (253, 24), (253, 25), (253, 26), (253, 27), (253, 28), (253, 29), (253, 30), (253, 31), (253, 32), (253, 36), (253, 37), (254, 16), (254, 18), (254, 19), (254, 20), (254, 21), (254, 22), (254, 23), (254, 24), (254, 25), (254, 26), (254, 27), (254, 28), (254, 29), (254, 30), (254, 31), (254, 32), (254, 33), (254, 34), (254, 38), (254, 40), (255, 17), (255, 19), (255, 20), (255, 21), (255, 22), (255, 23), (255, 24), (255, 25), (255, 26), (255, 27), (255, 28), (255, 29), (255, 30), (255, 31), (255, 32), (255, 33), (255, 34), (255, 35), (255, 36), (255, 37), (255, 41), (255, 42), (255, 43), (256, 18),
(256, 20), (256, 21), (256, 22), (256, 23), (256, 24), (256, 25), (256, 26), (256, 27), (256, 28), (256, 29), (256, 30), (256, 31), (256, 32), (256, 33), (256, 34), (256, 35), (256, 36), (256, 37), (256, 38), (256, 39), (256, 40), (256, 44), (256, 45), (257, 18), (257, 20), (257, 21), (257, 22), (257, 23), (257, 24), (257, 25), (257, 26), (257, 27), (257, 28), (257, 29), (257, 30), (257, 31), (257, 32), (257, 33), (257, 34), (257, 35), (257, 36), (257, 37), (257, 38), (257, 39), (257, 40), (257, 41), (257, 42), (257, 43), (257, 47), (257, 48), (258, 19), (258, 21), (258, 22), (258, 23), (258, 24), (258, 25), (258, 26), (258, 27), (258, 28), (258, 29), (258, 30), (258, 31), (258, 32), (258, 33), (258, 34), (258, 35), (258, 36), (258, 37), (258, 38), (258, 39), (258, 40), (258, 41),
(258, 42), (258, 43), (258, 44), (258, 45), (258, 46), (258, 49), (258, 51), (259, 20), (259, 22), (259, 23), (259, 24), (259, 25), (259, 26), (259, 27), (259, 28), (259, 29), (259, 30), (259, 31), (259, 32), (259, 33), (259, 34), (259, 35), (259, 36), (259, 37), (259, 38), (259, 39), (259, 40), (259, 41), (259, 42), (259, 43), (259, 44), (259, 45), (259, 46), (259, 47), (259, 48), (259, 52), (259, 53), (260, 21), (260, 23), (260, 24), (260, 25), (260, 26), (260, 27), (260, 28), (260, 29), (260, 30), (260, 31), (260, 32), (260, 33), (260, 34), (260, 35), (260, 36), (260, 37), (260, 38), (260, 39), (260, 40), (260, 41), (260, 42), (260, 43), (260, 44), (260, 45), (260, 46), (260, 47), (260, 48), (260, 49), (260, 50), (260, 51), (260, 56), (261, 22), (261, 24), (261, 25), (261, 26),
(261, 27), (261, 28), (261, 29), (261, 30), (261, 31), (261, 32), (261, 33), (261, 34), (261, 35), (261, 36), (261, 37), (261, 38), (261, 39), (261, 40), (261, 41), (261, 42), (261, 43), (261, 44), (261, 45), (261, 46), (261, 47), (261, 48), (261, 49), (261, 50), (261, 51), (261, 52), (261, 53), (261, 58), (262, 23), (262, 25), (262, 26), (262, 27), (262, 28), (262, 29), (262, 30), (262, 31), (262, 32), (262, 33), (262, 34), (262, 35), (262, 36), (262, 37), (262, 38), (262, 39), (262, 40), (262, 41), (262, 42), (262, 43), (262, 44), (262, 45), (262, 46), (262, 47), (262, 48), (262, 49), (262, 50), (262, 51), (262, 52), (262, 53), (262, 54), (262, 55), (262, 56), (262, 60), (263, 24), (263, 27), (263, 28), (263, 29), (263, 30), (263, 31), (263, 32), (263, 33), (263, 34), (263, 35),
(263, 36), (263, 37), (263, 38), (263, 39), (263, 40), (263, 41), (263, 42), (263, 43), (263, 44), (263, 45), (263, 46), (263, 47), (263, 48), (263, 49), (263, 50), (263, 51), (263, 52), (263, 53), (263, 54), (263, 55), (263, 56), (263, 57), (263, 58), (263, 62), (264, 25), (264, 28), (264, 29), (264, 30), (264, 31), (264, 32), (264, 33), (264, 34), (264, 35), (264, 36), (264, 37), (264, 38), (264, 39), (264, 40), (264, 41), (264, 42), (264, 43), (264, 44), (264, 45), (264, 46), (264, 47), (264, 48), (264, 49), (264, 50), (264, 51), (264, 52), (264, 53), (264, 54), (264, 55), (264, 56), (264, 57), (264, 58), (264, 59), (264, 60), (264, 64), (265, 27), (265, 30), (265, 31), (265, 32), (265, 33), (265, 34), (265, 35), (265, 36), (265, 37), (265, 38), (265, 39), (265, 40), (265, 41),
(265, 42), (265, 43), (265, 44), (265, 45), (265, 46), (265, 47), (265, 48), (265, 49), (265, 50), (265, 51), (265, 52), (265, 53), (265, 54), (265, 55), (265, 56), (265, 57), (265, 58), (265, 59), (265, 60), (265, 61), (265, 62), (265, 66), (266, 28), (266, 31), (266, 32), (266, 33), (266, 34), (266, 35), (266, 36), (266, 37), (266, 38), (266, 39), (266, 40), (266, 41), (266, 42), (266, 43), (266, 44), (266, 45), (266, 46), (266, 47), (266, 48), (266, 49), (266, 50), (266, 51), (266, 52), (266, 53), (266, 54), (266, 55), (266, 56), (266, 57), (266, 58), (266, 59), (266, 60), (266, 61), (266, 62), (266, 63), (266, 64), (266, 67), (267, 30), (267, 33), (267, 34), (267, 35), (267, 36), (267, 37), (267, 38), (267, 39), (267, 40), (267, 41), (267, 42), (267, 43), (267, 44), (267, 45),
(267, 46), (267, 47), (267, 48), (267, 49), (267, 50), (267, 51), (267, 52), (267, 53), (267, 54), (267, 55), (267, 56), (267, 57), (267, 58), (267, 59), (267, 60), (267, 61), (267, 62), (267, 63), (267, 64), (267, 65), (267, 66), (267, 68), (268, 31), (268, 32), (268, 36), (268, 37), (268, 38), (268, 39), (268, 40), (268, 41), (268, 42), (268, 43), (268, 44), (268, 45), (268, 46), (268, 47), (268, 48), (268, 49), (268, 50), (268, 51), (268, 52), (268, 53), (268, 54), (268, 55), (268, 56), (268, 57), (268, 58), (268, 59), (268, 60), (268, 61), (268, 62), (268, 63), (268, 64), (268, 65), (268, 66), (268, 67), (268, 69), (269, 33), (269, 34), (269, 35), (269, 59), (269, 60), (269, 61), (269, 62), (269, 63), (269, 64), (269, 65), (269, 66), (269, 67), (269, 68), (269, 70), (270, 36),
(270, 38), (270, 39), (270, 40), (270, 41), (270, 42), (270, 43), (270, 44), (270, 45), (270, 46), (270, 47), (270, 48), (270, 49), (270, 50), (270, 51), (270, 52), (270, 53), (270, 54), (270, 55), (270, 56), (270, 57), (270, 58), (270, 62), (270, 63), (270, 64), (270, 65), (270, 66), (270, 67), (270, 68), (270, 71), (271, 60), (271, 61), (271, 63), (271, 64), (271, 65), (271, 66), (271, 69), (271, 71), (272, 62), (272, 68), (273, 63), (273, 66), )
coordinates_CCCC99 = ((89, 147),
(89, 148), (89, 149), (89, 150), (89, 151), (89, 152), (89, 153), (89, 154), (89, 155), (89, 156), (89, 157), (89, 158), (89, 159), (89, 160), (90, 144), (90, 146), (90, 161), (90, 162), (90, 163), (90, 164), (91, 142), (91, 147), (91, 148), (91, 149), (91, 150), (91, 151), (91, 152), (91, 153), (91, 154), (91, 155), (91, 156), (91, 157), (91, 158), (91, 159), (91, 160), (91, 165), (91, 166), (91, 167), (92, 141), (92, 144), (92, 145), (92, 146), (92, 147), (92, 148), (92, 149), (92, 150), (92, 151), (92, 152), (92, 153), (92, 154), (92, 155), (92, 156), (92, 157), (92, 158), (92, 159), (92, 160), (92, 161), (92, 162), (92, 163), (92, 164), (92, 170), (93, 140), (93, 142), (93, 143), (93, 144), (93, 145), (93, 146), (93, 147), (93, 148), (93, 149), (93, 150), (93, 151), (93, 152),
(93, 153), (93, 154), (93, 155), (93, 156), (93, 157), (93, 158), (93, 159), (93, 160), (93, 161), (93, 162), (93, 163), (93, 164), (93, 165), (93, 166), (93, 167), (93, 168), (93, 172), (94, 140), (94, 142), (94, 143), (94, 144), (94, 145), (94, 146), (94, 147), (94, 148), (94, 149), (94, 150), (94, 151), (94, 152), (94, 153), (94, 154), (94, 155), (94, 156), (94, 157), (94, 158), (94, 159), (94, 160), (94, 161), (94, 162), (94, 163), (94, 164), (94, 165), (94, 166), (94, 167), (94, 168), (94, 169), (94, 170), (94, 174), (95, 139), (95, 141), (95, 142), (95, 143), (95, 144), (95, 145), (95, 146), (95, 147), (95, 148), (95, 149), (95, 150), (95, 151), (95, 152), (95, 153), (95, 154), (95, 155), (95, 156), (95, 157), (95, 158), (95, 159), (95, 160), (95, 161), (95, 162), (95, 163),
(95, 164), (95, 165), (95, 166), (95, 167), (95, 168), (95, 169), (95, 170), (95, 171), (95, 172), (95, 175), (95, 176), (96, 139), (96, 141), (96, 142), (96, 143), (96, 144), (96, 145), (96, 146), (96, 147), (96, 148), (96, 149), (96, 150), (96, 151), (96, 152), (96, 153), (96, 154), (96, 155), (96, 156), (96, 157), (96, 158), (96, 159), (96, 160), (96, 161), (96, 162), (96, 163), (96, 164), (96, 165), (96, 166), (96, 167), (96, 168), (96, 169), (96, 170), (96, 171), (96, 172), (96, 173), (96, 174), (96, 177), (97, 139), (97, 141), (97, 142), (97, 143), (97, 144), (97, 145), (97, 146), (97, 147), (97, 148), (97, 149), (97, 150), (97, 151), (97, 152), (97, 153), (97, 154), (97, 155), (97, 156), (97, 157), (97, 158), (97, 159), (97, 160), (97, 161), (97, 162), (97, 163), (97, 164),
(97, 165), (97, 166), (97, 167), (97, 168), (97, 169), (97, 170), (97, 171), (97, 172), (97, 173), (97, 174), (97, 175), (97, 178), (98, 139), (98, 141), (98, 142), (98, 143), (98, 144), (98, 145), (98, 146), (98, 147), (98, 148), (98, 149), (98, 150), (98, 151), (98, 152), (98, 153), (98, 154), (98, 155), (98, 156), (98, 157), (98, 158), (98, 159), (98, 160), (98, 161), (98, 162), (98, 163), (98, 164), (98, 165), (98, 166), (98, 167), (98, 168), (98, 169), (98, 170), (98, 171), (98, 172), (98, 173), (98, 174), (98, 175), (98, 176), (98, 177), (98, 179), (99, 139), (99, 141), (99, 142), (99, 143), (99, 144), (99, 145), (99, 146), (99, 147), (99, 148), (99, 149), (99, 150), (99, 151), (99, 152), (99, 153), (99, 154), (99, 155), (99, 156), (99, 157), (99, 158), (99, 159), (99, 160),
(99, 161), (99, 162), (99, 163), (99, 164), (99, 165), (99, 166), (99, 167), (99, 168), (99, 169), (99, 170), (99, 171), (99, 172), (99, 173), (99, 174), (99, 175), (99, 176), (99, 177), (100, 139), (100, 141), (100, 142), (100, 143), (100, 144), (100, 145), (100, 146), (100, 147), (100, 148), (100, 149), (100, 150), (100, 151), (100, 152), (100, 153), (100, 154), (100, 155), (100, 156), (100, 157), (100, 158), (100, 159), (100, 160), (100, 161), (100, 162), (100, 163), (100, 164), (100, 165), (100, 166), (100, 167), (100, 168), (100, 169), (100, 170), (100, 171), (100, 172), (100, 173), (100, 174), (100, 175), (100, 176), (100, 177), (100, 178), (100, 180), (101, 139), (101, 141), (101, 142), (101, 143), (101, 144), (101, 145), (101, 146), (101, 147), (101, 148), (101, 149), (101, 150), (101, 151), (101, 152), (101, 153), (101, 154),
(101, 155), (101, 156), (101, 157), (101, 158), (101, 159), (101, 160), (101, 161), (101, 162), (101, 163), (101, 164), (101, 165), (101, 166), (101, 167), (101, 168), (101, 169), (101, 170), (101, 171), (101, 172), (101, 173), (101, 174), (101, 175), (101, 176), (101, 177), (101, 178), (101, 180), (102, 140), (102, 142), (102, 143), (102, 144), (102, 145), (102, 146), (102, 147), (102, 148), (102, 149), (102, 150), (102, 151), (102, 152), (102, 153), (102, 154), (102, 155), (102, 156), (102, 157), (102, 158), (102, 159), (102, 160), (102, 161), (102, 162), (102, 163), (102, 164), (102, 165), (102, 166), (102, 167), (102, 168), (102, 169), (102, 170), (102, 171), (102, 172), (102, 173), (102, 174), (102, 175), (102, 176), (102, 177), (102, 178), (102, 180), (103, 140), (103, 142), (103, 143), (103, 144), (103, 145), (103, 146), (103, 147), (103, 148),
(103, 149), (103, 150), (103, 151), (103, 152), (103, 153), (103, 154), (103, 155), (103, 156), (103, 157), (103, 158), (103, 159), (103, 160), (103, 161), (103, 162), (103, 163), (103, 164), (103, 165), (103, 166), (103, 167), (103, 168), (103, 169), (103, 170), (103, 171), (103, 172), (103, 173), (103, 174), (103, 175), (103, 176), (103, 177), (103, 178), (103, 180), (104, 140), (104, 142), (104, 143), (104, 144), (104, 145), (104, 146), (104, 147), (104, 148), (104, 149), (104, 150), (104, 151), (104, 152), (104, 153), (104, 154), (104, 155), (104, 156), (104, 157), (104, 158), (104, 159), (104, 160), (104, 161), (104, 162), (104, 163), (104, 164), (104, 165), (104, 166), (104, 167), (104, 168), (104, 169), (104, 170), (104, 171), (104, 172), (104, 173), (104, 174), (104, 175), (104, 176), (104, 177), (104, 178), (104, 180), (105, 141), (105, 143),
(105, 144), (105, 145), (105, 146), (105, 147), (105, 148), (105, 149), (105, 150), (105, 151), (105, 152), (105, 153), (105, 154), (105, 155), (105, 156), (105, 157), (105, 158), (105, 159), (105, 160), (105, 161), (105, 162), (105, 163), (105, 164), (105, 165), (105, 166), (105, 167), (105, 168), (105, 169), (105, 170), (105, 171), (105, 172), (105, 173), (105, 174), (105, 175), (105, 176), (105, 177), (105, 179), (106, 141), (106, 143), (106, 144), (106, 145), (106, 146), (106, 147), (106, 148), (106, 149), (106, 150), (106, 151), (106, 152), (106, 153), (106, 154), (106, 155), (106, 156), (106, 157), (106, 158), (106, 159), (106, 160), (106, 161), (106, 162), (106, 163), (106, 164), (106, 165), (106, 166), (106, 167), (106, 168), (106, 169), (106, 170), (106, 171), (106, 172), (106, 173), (106, 174), (106, 175), (106, 176), (106, 177), (106, 179),
(107, 141), (107, 142), (107, 143), (107, 144), (107, 145), (107, 146), (107, 147), (107, 148), (107, 149), (107, 150), (107, 151), (107, 152), (107, 153), (107, 154), (107, 155), (107, 156), (107, 157), (107, 158), (107, 159), (107, 160), (107, 161), (107, 162), (107, 163), (107, 164), (107, 165), (107, 166), (107, 167), (107, 168), (107, 169), (107, 170), (107, 171), (107, 172), (107, 173), (107, 174), (107, 175), (107, 176), (107, 177), (107, 179), (108, 142), (108, 144), (108, 145), (108, 146), (108, 147), (108, 148), (108, 149), (108, 150), (108, 151), (108, 152), (108, 153), (108, 154), (108, 155), (108, 156), (108, 157), (108, 158), (108, 159), (108, 160), (108, 161), (108, 162), (108, 163), (108, 164), (108, 165), (108, 166), (108, 167), (108, 168), (108, 169), (108, 170), (108, 171), (108, 172), (108, 173), (108, 174), (108, 175), (108, 176),
(108, 178), (109, 142), (109, 144), (109, 145), (109, 146), (109, 147), (109, 148), (109, 149), (109, 150), (109, 151), (109, 152), (109, 153), (109, 154), (109, 155), (109, 156), (109, 157), (109, 158), (109, 159), (109, 160), (109, 161), (109, 162), (109, 163), (109, 164), (109, 165), (109, 166), (109, 167), (109, 168), (109, 169), (109, 170), (109, 171), (109, 172), (109, 173), (109, 174), (109, 175), (109, 176), (110, 143), (110, 145), (110, 146), (110, 147), (110, 148), (110, 149), (110, 150), (110, 151), (110, 152), (110, 153), (110, 154), (110, 155), (110, 156), (110, 157), (110, 158), (110, 159), (110, 160), (110, 161), (110, 162), (110, 163), (110, 164), (110, 165), (110, 166), (110, 167), (110, 168), (110, 169), (110, 170), (110, 171), (110, 172), (110, 173), (110, 174), (110, 175), (110, 177), (111, 143), (111, 145), (111, 146), (111, 147),
(111, 148), (111, 149), (111, 150), (111, 151), (111, 152), (111, 153), (111, 154), (111, 155), (111, 156), (111, 157), (111, 158), (111, 159), (111, 160), (111, 161), (111, 162), (111, 163), (111, 164), (111, 165), (111, 166), (111, 167), (111, 168), (111, 169), (111, 170), (111, 171), (111, 172), (111, 173), (111, 174), (111, 176), (112, 143), (112, 145), (112, 146), (112, 147), (112, 148), (112, 149), (112, 150), (112, 151), (112, 152), (112, 153), (112, 154), (112, 155), (112, 156), (112, 157), (112, 158), (112, 159), (112, 160), (112, 161), (112, 162), (112, 163), (112, 164), (112, 165), (112, 166), (112, 167), (112, 168), (112, 169), (112, 170), (112, 171), (112, 172), (112, 173), (112, 174), (113, 143), (113, 144), (113, 145), (113, 146), (113, 147), (113, 148), (113, 149), (113, 150), (113, 151), (113, 152), (113, 153), (113, 154), (113, 155),
(113, 156), (113, 157), (113, 158), (113, 159), (113, 160), (113, 161), (113, 162), (113, 163), (113, 164), (113, 165), (113, 166), (113, 167), (113, 168), (113, 169), (113, 170), (113, 171), (113, 172), (113, 173), (113, 175), (114, 144), (114, 146), (114, 147), (114, 148), (114, 149), (114, 150), (114, 151), (114, 152), (114, 153), (114, 154), (114, 155), (114, 156), (114, 157), (114, 158), (114, 159), (114, 160), (114, 161), (114, 162), (114, 163), (114, 164), (114, 165), (114, 166), (114, 167), (114, 168), (114, 169), (114, 170), (114, 171), (114, 172), (114, 174), (115, 144), (115, 146), (115, 147), (115, 148), (115, 149), (115, 150), (115, 151), (115, 152), (115, 153), (115, 154), (115, 155), (115, 156), (115, 157), (115, 158), (115, 159), (115, 160), (115, 161), (115, 162), (115, 163), (115, 164), (115, 165), (115, 166), (115, 167), (115, 168),
(115, 169), (115, 170), (115, 171), (115, 173), (116, 144), (116, 146), (116, 147), (116, 148), (116, 149), (116, 150), (116, 151), (116, 152), (116, 153), (116, 154), (116, 155), (116, 156), (116, 157), (116, 158), (116, 159), (116, 160), (116, 161), (116, 162), (116, 163), (116, 164), (116, 165), (116, 166), (116, 167), (116, 168), (116, 169), (116, 170), (116, 171), (116, 173), (117, 144), (117, 146), (117, 147), (117, 148), (117, 149), (117, 150), (117, 151), (117, 152), (117, 153), (117, 154), (117, 155), (117, 156), (117, 157), (117, 158), (117, 159), (117, 160), (117, 161), (117, 162), (117, 163), (117, 164), (117, 165), (117, 166), (117, 167), (117, 168), (117, 169), (117, 170), (117, 172), (118, 144), (118, 146), (118, 147), (118, 148), (118, 149), (118, 150), (118, 151), (118, 152), (118, 153), (118, 154), (118, 155), (118, 156), (118, 157),
(118, 158), (118, 159), (118, 160), (118, 161), (118, 162), (118, 163), (118, 164), (118, 165), (118, 166), (118, 167), (118, 168), (118, 169), (118, 170), (118, 172), (119, 143), (119, 145), (119, 146), (119, 147), (119, 148), (119, 149), (119, 150), (119, 151), (119, 152), (119, 153), (119, 154), (119, 155), (119, 156), (119, 157), (119, 158), (119, 159), (119, 160), (119, 161), (119, 162), (119, 163), (119, 164), (119, 165), (119, 166), (119, 167), (119, 168), (119, 169), (119, 171), (120, 142), (120, 144), (120, 145), (120, 146), (120, 147), (120, 148), (120, 149), (120, 150), (120, 151), (120, 152), (120, 153), (120, 154), (120, 155), (120, 156), (120, 157), (120, 158), (120, 159), (120, 160), (120, 161), (120, 162), (120, 163), (120, 164), (120, 165), (120, 166), (120, 167), (120, 168), (120, 171), (121, 143), (121, 145), (121, 146), (121, 147),
(121, 148), (121, 149), (121, 150), (121, 151), (121, 152), (121, 153), (121, 154), (121, 155), (121, 156), (121, 157), (121, 158), (121, 159), (121, 160), (121, 161), (121, 162), (121, 163), (121, 164), (121, 169), (121, 171), (122, 143), (122, 145), (122, 146), (122, 147), (122, 148), (122, 149), (122, 150), (122, 151), (122, 152), (122, 153), (122, 154), (122, 155), (122, 156), (122, 157), (122, 158), (122, 159), (122, 160), (122, 161), (122, 165), (122, 166), (122, 167), (122, 168), (123, 144), (123, 146), (123, 147), (123, 148), (123, 149), (123, 150), (123, 151), (123, 152), (123, 153), (123, 154), (123, 155), (123, 156), (123, 157), (123, 158), (123, 162), (123, 163), (124, 144), (124, 146), (124, 147), (124, 148), (124, 149), (124, 150), (124, 151), (124, 152), (124, 153), (124, 154), (124, 155), (124, 156), (124, 159), (124, 160), (125, 145),
(125, 147), (125, 148), (125, 149), (125, 150), (125, 151), (125, 152), (125, 153), (125, 154), (125, 157), (125, 158), (126, 145), (126, 147), (126, 148), (126, 149), (126, 150), (126, 151), (126, 152), (126, 155), (126, 156), (127, 146), (127, 148), (127, 149), (127, 150), (127, 153), (127, 154), (128, 146), (128, 148), (128, 151), (128, 152), (129, 147), (129, 150), (130, 147), (130, 148), (315, 144), (316, 144), (316, 146), (317, 143), (317, 147), (318, 143), (318, 145), (318, 146), (318, 148), (319, 143), (319, 145), (319, 146), (319, 147), (319, 149), (320, 142), (320, 144), (320, 145), (320, 146), (320, 147), (320, 148), (320, 151), (320, 161), (321, 142), (321, 144), (321, 145), (321, 146), (321, 147), (321, 148), (321, 149), (321, 152), (321, 159), (321, 162), (322, 141), (322, 143), (322, 144), (322, 145), (322, 146), (322, 147), (322, 148),
(322, 149), (322, 150), (322, 151), (322, 153), (322, 157), (322, 161), (322, 163), (323, 141), (323, 143), (323, 144), (323, 145), (323, 146), (323, 147), (323, 148), (323, 149), (323, 150), (323, 151), (323, 152), (323, 155), (323, 156), (323, 159), (323, 160), (323, 161), (323, 162), (324, 140), (324, 141), (324, 142), (324, 143), (324, 144), (324, 145), (324, 146), (324, 147), (324, 148), (324, 149), (324, 150), (324, 151), (324, 152), (324, 153), (324, 157), (324, 158), (324, 159), (324, 160), (324, 161), (324, 162), (324, 163), (324, 166), (325, 140), (325, 142), (325, 143), (325, 144), (325, 145), (325, 146), (325, 147), (325, 148), (325, 149), (325, 150), (325, 151), (325, 152), (325, 153), (325, 154), (325, 155), (325, 156), (325, 157), (325, 158), (325, 159), (325, 160), (325, 161), (325, 162), (325, 163), (325, 164), (325, 167), (326, 140),
(326, 142), (326, 143), (326, 144), (326, 145), (326, 146), (326, 147), (326, 148), (326, 149), (326, 150), (326, 151), (326, 152), (326, 153), (326, 154), (326, 155), (326, 156), (326, 157), (326, 158), (326, 159), (326, 160), (326, 161), (326, 162), (326, 163), (326, 164), (326, 165), (326, 166), (326, 168), (326, 169), (326, 171), (327, 139), (327, 141), (327, 142), (327, 143), (327, 144), (327, 145), (327, 146), (327, 147), (327, 148), (327, 149), (327, 150), (327, 151), (327, 152), (327, 153), (327, 154), (327, 155), (327, 156), (327, 157), (327, 158), (327, 159), (327, 160), (327, 161), (327, 162), (327, 163), (327, 164), (327, 165), (327, 166), (327, 167), (327, 171), (328, 140), (328, 142), (328, 143), (328, 144), (328, 145), (328, 146), (328, 147), (328, 148), (328, 149), (328, 150), (328, 151), (328, 152), (328, 153), (328, 154), (328, 155),
(328, 156), (328, 157), (328, 158), (328, 159), (328, 160), (328, 161), (328, 162), (328, 163), (328, 164), (328, 165), (328, 166), (328, 167), (328, 168), (328, 169), (328, 171), (329, 141), (329, 143), (329, 144), (329, 145), (329, 146), (329, 147), (329, 148), (329, 149), (329, 150), (329, 151), (329, 152), (329, 153), (329, 154), (329, 155), (329, 156), (329, 157), (329, 158), (329, 159), (329, 160), (329, 161), (329, 162), (329, 163), (329, 164), (329, 165), (329, 166), (329, 167), (329, 168), (329, 169), (329, 170), (329, 172), (330, 142), (330, 144), (330, 145), (330, 146), (330, 147), (330, 148), (330, 149), (330, 150), (330, 151), (330, 152), (330, 153), (330, 154), (330, 155), (330, 156), (330, 157), (330, 158), (330, 159), (330, 160), (330, 161), (330, 162), (330, 163), (330, 164), (330, 165), (330, 166), (330, 167), (330, 168), (330, 169),
(330, 170), (330, 172), (331, 142), (331, 144), (331, 145), (331, 146), (331, 147), (331, 148), (331, 149), (331, 150), (331, 151), (331, 152), (331, 153), (331, 154), (331, 155), (331, 156), (331, 157), (331, 158), (331, 159), (331, 160), (331, 161), (331, 162), (331, 163), (331, 164), (331, 165), (331, 166), (331, 167), (331, 168), (331, 169), (331, 170), (331, 171), (331, 173), (332, 142), (332, 144), (332, 145), (332, 146), (332, 147), (332, 148), (332, 149), (332, 150), (332, 151), (332, 152), (332, 153), (332, 154), (332, 155), (332, 156), (332, 157), (332, 158), (332, 159), (332, 160), (332, 161), (332, 162), (332, 163), (332, 164), (332, 165), (332, 166), (332, 167), (332, 168), (332, 169), (332, 170), (332, 171), (332, 172), (332, 174), (333, 142), (333, 144), (333, 145), (333, 146), (333, 147), (333, 148), (333, 149), (333, 150), (333, 151),
(333, 152), (333, 153), (333, 154), (333, 155), (333, 156), (333, 157), (333, 158), (333, 159), (333, 160), (333, 161), (333, 162), (333, 163), (333, 164), (333, 165), (333, 166), (333, 167), (333, 168), (333, 169), (333, 170), (333, 171), (333, 172), (334, 142), (334, 144), (334, 145), (334, 146), (334, 147), (334, 148), (334, 149), (334, 150), (334, 151), (334, 152), (334, 153), (334, 154), (334, 155), (334, 156), (334, 157), (334, 158), (334, 159), (334, 160), (334, 161), (334, 162), (334, 163), (334, 164), (334, 165), (334, 166), (334, 167), (334, 168), (334, 169), (334, 170), (334, 171), (334, 172), (334, 173), (334, 175), (335, 142), (335, 144), (335, 145), (335, 146), (335, 147), (335, 148), (335, 149), (335, 150), (335, 151), (335, 152), (335, 153), (335, 154), (335, 155), (335, 156), (335, 157), (335, 158), (335, 159), (335, 160), (335, 161),
(335, 162), (335, 163), (335, 164), (335, 165), (335, 166), (335, 167), (335, 168), (335, 169), (335, 170), (335, 171), (335, 172), (335, 173), (335, 174), (335, 176), (336, 142), (336, 144), (336, 145), (336, 146), (336, 147), (336, 148), (336, 149), (336, 150), (336, 151), (336, 152), (336, 153), (336, 154), (336, 155), (336, 156), (336, 157), (336, 158), (336, 159), (336, 160), (336, 161), (336, 162), (336, 163), (336, 164), (336, 165), (336, 166), (336, 167), (336, 168), (336, 169), (336, 170), (336, 171), (336, 172), (336, 173), (336, 174), (336, 175), (336, 177), (337, 142), (337, 144), (337, 145), (337, 146), (337, 147), (337, 148), (337, 149), (337, 150), (337, 151), (337, 152), (337, 153), (337, 154), (337, 155), (337, 156), (337, 157), (337, 158), (337, 159), (337, 160), (337, 161), (337, 162), (337, 163), (337, 164), (337, 165), (337, 166),
(337, 167), (337, 168), (337, 169), (337, 170), (337, 171), (337, 172), (337, 173), (337, 174), (337, 175), (337, 176), (337, 178), (338, 141), (338, 143), (338, 144), (338, 145), (338, 146), (338, 147), (338, 148), (338, 149), (338, 150), (338, 151), (338, 152), (338, 153), (338, 154), (338, 155), (338, 156), (338, 157), (338, 158), (338, 159), (338, 160), (338, 161), (338, 162), (338, 163), (338, 164), (338, 165), (338, 166), (338, 167), (338, 168), (338, 169), (338, 170), (338, 171), (338, 172), (338, 173), (338, 174), (338, 175), (338, 176), (338, 177), (339, 141), (339, 143), (339, 144), (339, 145), (339, 146), (339, 147), (339, 148), (339, 149), (339, 150), (339, 151), (339, 152), (339, 153), (339, 154), (339, 155), (339, 156), (339, 157), (339, 158), (339, 159), (339, 160), (339, 161), (339, 162), (339, 163), (339, 164), (339, 165), (339, 166),
(339, 167), (339, 168), (339, 169), (339, 170), (339, 171), (339, 172), (339, 173), (339, 174), (339, 175), (339, 176), (339, 177), (339, 179), (340, 141), (340, 143), (340, 144), (340, 145), (340, 146), (340, 147), (340, 148), (340, 149), (340, 150), (340, 151), (340, 152), (340, 153), (340, 154), (340, 155), (340, 156), (340, 157), (340, 158), (340, 159), (340, 160), (340, 161), (340, 162), (340, 163), (340, 164), (340, 165), (340, 166), (340, 167), (340, 168), (340, 169), (340, 170), (340, 171), (340, 172), (340, 173), (340, 174), (340, 175), (340, 176), (340, 177), (340, 178), (341, 140), (341, 142), (341, 143), (341, 144), (341, 145), (341, 146), (341, 147), (341, 148), (341, 149), (341, 150), (341, 151), (341, 152), (341, 153), (341, 154), (341, 155), (341, 156), (341, 157), (341, 158), (341, 159), (341, 160), (341, 161), (341, 162), (341, 163),
(341, 164), (341, 165), (341, 166), (341, 167), (341, 168), (341, 169), (341, 170), (341, 171), (341, 172), (341, 173), (341, 174), (341, 175), (341, 176), (341, 177), (341, 178), (341, 180), (342, 140), (342, 142), (342, 143), (342, 144), (342, 145), (342, 146), (342, 147), (342, 148), (342, 149), (342, 150), (342, 151), (342, 152), (342, 153), (342, 154), (342, 155), (342, 156), (342, 157), (342, 158), (342, 159), (342, 160), (342, 161), (342, 162), (342, 163), (342, 164), (342, 165), (342, 166), (342, 167), (342, 168), (342, 169), (342, 170), (342, 171), (342, 172), (342, 173), (342, 174), (342, 175), (342, 176), (342, 177), (342, 178), (342, 180), (343, 139), (343, 141), (343, 142), (343, 143), (343, 144), (343, 145), (343, 146), (343, 147), (343, 148), (343, 149), (343, 150), (343, 151), (343, 152), (343, 153), (343, 154), (343, 155), (343, 156),
(343, 157), (343, 158), (343, 159), (343, 160), (343, 161), (343, 162), (343, 163), (343, 164), (343, 165), (343, 166), (343, 167), (343, 168), (343, 169), (343, 170), (343, 171), (343, 172), (343, 173), (343, 174), (343, 175), (343, 176), (343, 177), (343, 178), (343, 180), (344, 139), (344, 141), (344, 142), (344, 143), (344, 144), (344, 145), (344, 146), (344, 147), (344, 148), (344, 149), (344, 150), (344, 151), (344, 152), (344, 153), (344, 154), (344, 155), (344, 156), (344, 157), (344, 158), (344, 159), (344, 160), (344, 161), (344, 162), (344, 163), (344, 164), (344, 165), (344, 166), (344, 167), (344, 168), (344, 169), (344, 170), (344, 171), (344, 172), (344, 173), (344, 174), (344, 175), (344, 176), (344, 177), (344, 179), (345, 138), (345, 140), (345, 141), (345, 142), (345, 143), (345, 144), (345, 145), (345, 146), (345, 147), (345, 148),
(345, 149), (345, 150), (345, 151), (345, 152), (345, 153), (345, 154), (345, 155), (345, 156), (345, 157), (345, 158), (345, 159), (345, 160), (345, 161), (345, 162), (345, 163), (345, 164), (345, 165), (345, 166), (345, 167), (345, 168), (345, 169), (345, 170), (345, 171), (345, 172), (345, 173), (345, 174), (345, 175), (345, 176), (345, 178), (346, 137), (346, 139), (346, 140), (346, 141), (346, 142), (346, 143), (346, 144), (346, 145), (346, 146), (346, 147), (346, 148), (346, 149), (346, 150), (346, 151), (346, 152), (346, 153), (346, 154), (346, 155), (346, 156), (346, 157), (346, 158), (346, 159), (346, 160), (346, 161), (346, 162), (346, 163), (346, 164), (346, 165), (346, 166), (346, 167), (346, 168), (346, 169), (346, 170), (346, 171), (346, 172), (346, 173), (346, 174), (346, 175), (346, 177), (347, 136), (347, 138), (347, 139), (347, 140),
(347, 141), (347, 142), (347, 143), (347, 144), (347, 145), (347, 146), (347, 147), (347, 148), (347, 149), (347, 150), (347, 151), (347, 152), (347, 153), (347, 154), (347, 155), (347, 156), (347, 157), (347, 158), (347, 159), (347, 160), (347, 161), (347, 162), (347, 163), (347, 164), (347, 165), (347, 166), (347, 167), (347, 168), (347, 169), (347, 170), (347, 171), (347, 172), (347, 173), (347, 174), (347, 176), (348, 136), (348, 138), (348, 139), (348, 140), (348, 141), (348, 142), (348, 143), (348, 144), (348, 145), (348, 146), (348, 147), (348, 148), (348, 149), (348, 150), (348, 151), (348, 152), (348, 153), (348, 154), (348, 155), (348, 156), (348, 157), (348, 158), (348, 159), (348, 160), (348, 161), (348, 162), (348, 163), (348, 164), (348, 165), (348, 166), (348, 167), (348, 168), (348, 169), (348, 170), (348, 171), (348, 172), (348, 173),
(349, 136), (349, 138), (349, 139), (349, 140), (349, 141), (349, 142), (349, 143), (349, 144), (349, 145), (349, 146), (349, 147), (349, 148), (349, 149), (349, 150), (349, 151), (349, 152), (349, 153), (349, 154), (349, 155), (349, 156), (349, 157), (349, 158), (349, 159), (349, 160), (349, 161), (349, 162), (349, 163), (349, 164), (349, 165), (349, 166), (349, 167), (349, 168), (349, 169), (349, 170), (349, 171), (349, 172), (349, 175), (350, 137), (350, 139), (350, 140), (350, 141), (350, 142), (350, 143), (350, 144), (350, 145), (350, 146), (350, 147), (350, 148), (350, 149), (350, 150), (350, 151), (350, 152), (350, 153), (350, 154), (350, 155), (350, 156), (350, 157), (350, 158), (350, 159), (350, 160), (350, 161), (350, 162), (350, 163), (350, 164), (350, 165), (350, 166), (350, 167), (350, 168), (350, 169), (350, 170), (350, 171), (350, 172),
(350, 174), (351, 138), (351, 141), (351, 142), (351, 143), (351, 144), (351, 145), (351, 146), (351, 147), (351, 148), (351, 149), (351, 150), (351, 151), (351, 152), (351, 153), (351, 154), (351, 155), (351, 156), (351, 157), (351, 158), (351, 159), (351, 160), (351, 161), (351, 162), (351, 163), (351, 164), (351, 165), (351, 166), (351, 167), (351, 168), (351, 169), (351, 170), (351, 171), (351, 173), (352, 139), (352, 140), (352, 144), (352, 145), (352, 146), (352, 147), (352, 148), (352, 149), (352, 150), (352, 151), (352, 152), (352, 153), (352, 154), (352, 155), (352, 156), (352, 157), (352, 158), (352, 159), (352, 160), (352, 161), (352, 162), (352, 163), (352, 164), (352, 165), (352, 166), (352, 167), (352, 168), (352, 169), (352, 170), (352, 172), (353, 141), (353, 147), (353, 148), (353, 149), (353, 150), (353, 151), (353, 152), (353, 153),
(353, 154), (353, 155), (353, 156), (353, 157), (353, 158), (353, 159), (353, 160), (353, 161), (353, 162), (353, 163), (353, 164), (353, 165), (353, 166), (353, 167), (353, 168), (353, 171), (354, 144), (354, 145), (354, 146), (354, 153), (354, 154), (354, 155), (354, 156), (354, 157), (354, 158), (354, 159), (354, 160), (354, 161), (354, 162), (354, 163), (354, 164), (354, 170), (355, 147), (355, 148), (355, 149), (355, 150), (355, 151), (355, 152), (355, 165), (355, 166), (355, 167), (355, 168), (356, 153), (356, 154), (356, 155), (356, 156), (356, 157), (356, 158), (356, 159), (356, 160), (356, 161), (356, 162), (356, 163), (356, 164), )
coordinates_FF3366 = ((100, 135),
(100, 137), (101, 133), (101, 137), (102, 129), (102, 131), (102, 132), (102, 135), (102, 137), (103, 133), (103, 134), (103, 135), (103, 136), (103, 138), (104, 130), (104, 132), (104, 133), (104, 134), (104, 135), (104, 136), (104, 138), (105, 130), (105, 132), (105, 133), (105, 134), (105, 135), (105, 136), (105, 138), (106, 130), (106, 132), (106, 133), (106, 134), (106, 135), (106, 136), (106, 137), (106, 139), (107, 131), (107, 133), (107, 134), (107, 135), (107, 136), (107, 137), (107, 139), (108, 131), (108, 133), (108, 134), (108, 135), (108, 136), (108, 137), (108, 138), (108, 140), (109, 132), (109, 134), (109, 135), (109, 136), (109, 137), (109, 138), (109, 140), (110, 132), (110, 134), (110, 135), (110, 136), (110, 137), (110, 138), (110, 140), (111, 133), (111, 135), (111, 136), (111, 137), (111, 138), (111, 139), (111, 141), (112, 133),
(112, 135), (112, 136), (112, 137), (112, 138), (112, 139), (112, 141), (113, 134), (113, 136), (113, 137), (113, 138), (113, 139), (113, 141), (114, 135), (114, 137), (114, 138), (114, 139), (114, 141), (115, 136), (115, 138), (115, 139), (115, 140), (115, 142), (116, 137), (116, 139), (116, 140), (116, 142), (117, 138), (117, 142), (118, 139), (118, 142), (329, 138), (330, 136), (330, 139), (331, 136), (331, 138), (331, 140), (332, 135), (332, 137), (332, 138), (332, 140), (333, 134), (333, 136), (333, 137), (333, 138), (333, 140), (334, 134), (334, 136), (334, 137), (334, 138), (334, 140), (335, 133), (335, 135), (335, 136), (335, 137), (335, 138), (335, 140), (336, 132), (336, 134), (336, 135), (336, 136), (336, 137), (336, 138), (336, 140), (337, 132), (337, 134), (337, 135), (337, 136), (337, 137), (337, 139), (338, 131), (338, 133), (338, 134),
(338, 135), (338, 136), (338, 137), (338, 139), (339, 131), (339, 133), (339, 134), (339, 135), (339, 136), (339, 137), (339, 139), (340, 130), (340, 132), (340, 133), (340, 134), (340, 135), (340, 136), (340, 138), (341, 130), (341, 132), (341, 133), (341, 134), (341, 135), (341, 136), (341, 138), (342, 130), (342, 132), (342, 133), (342, 134), (342, 135), (342, 136), (342, 138), (343, 129), (343, 131), (343, 132), (343, 133), (343, 134), (343, 135), (343, 137), (344, 129), (344, 136), (345, 130), (345, 132), (345, 133), (345, 135), )
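# Editorial sketch (assumption, not part of the original file): the coordinate
# tuples above look like per-colour pixel positions, with each variable suffix
# (CCCC99, FF3366, 666699) read as an RGB hex code. The hypothetical helper
# below shows one way such groups could be painted onto a Pillow image; the
# function name, the (row, col) ordering, and the canvas size are assumptions.
def _draw_coordinate_groups(size, groups):
    """Paint every (row, col) pair in *groups* with its hex colour."""
    from PIL import Image  # local import keeps the sketch self-contained

    img = Image.new("RGB", size, "white")
    for hex_colour, coords in groups.items():
        # Split the 6-digit hex name into an (R, G, B) tuple.
        rgb = tuple(int(hex_colour[i:i + 2], 16) for i in (0, 2, 4))
        for row, col in coords:
            img.putpixel((col, row), rgb)  # putpixel expects (x, y) == (col, row)
    return img
# Hypothetical usage:
#     img = _draw_coordinate_groups(
#         (400, 400),
#         {"CCCC99": coordinates_CCCC99, "FF3366": coordinates_FF3366})
#     img.save("preview.png")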
coordinates_666699 = ((126, 167),
(126, 168), (126, 169), (126, 170), (126, 171), (126, 172), (126, 174), (127, 165), (127, 175), (127, 177), (128, 163), (128, 166), (128, 167), (128, 168), (128, 169), (128, 170), (128, 171), (128, 172), (128, 173), (128, 174), (128, 179), (129, 162), (129, 165), (129, 166), (129, 167), (129, 168), (129, 169), (129, 170), (129, 171), (129, 172), (129, 173), (129, 174), (129, 175), (129, 176), (129, 177), (129, 181), (130, 161), (130, 163), (130, 164), (130, 165), (130, 166), (130, 167), (130, 168), (130, 169), (130, 170), (130, 171), (130, 172), (130, 173), (130, 174), (130, 175), (130, 176), (130, 177), (130, 178), (130, 179), (130, 183), (131, 160), (131, 162), (131, 163), (131, 164), (131, 165), (131, 166), (131, 167), (131, 168), (131, 169), (131, 170), (131, 171), (131, 172), (131, 173), (131, 174), (131, 175), (131, 176), (131, 177), (131, 178),
(131, 179), (131, 180), (131, 181), (131, 185), (131, 186), (132, 160), (132, 162), (132, 163), (132, 164), (132, 165), (132, 166), (132, 167), (132, 168), (132, 169), (132, 170), (132, 171), (132, 172), (132, 173), (132, 174), (132, 175), (132, 176), (132, 177), (132, 178), (132, 179), (132, 180), (132, 181), (132, 182), (132, 183), (132, 187), (132, 189), (132, 190), (133, 159), (133, 161), (133, 162), (133, 163), (133, 164), (133, 165), (133, 166), (133, 167), (133, 168), (133, 169), (133, 170), (133, 171), (133, 172), (133, 173), (133, 174), (133, 175), (133, 176), (133, 177), (133, 178), (133, 179), (133, 180), (133, 181), (133, 182), (133, 183), (133, 184), (133, 185), (133, 186), (133, 190), (134, 158), (134, 160), (134, 161), (134, 162), (134, 163), (134, 164), (134, 165), (134, 166), (134, 167), (134, 168), (134, 169), (134, 170), (134, 171),
(134, 172), (134, 173), (134, 174), (134, 175), (134, 176), (134, 177), (134, 178), (134, 179), (134, 180), (134, 181), (134, 182), (134, 183), (134, 184), (134, 185), (134, 186), (134, 187), (134, 188), (134, 190), (135, 158), (135, 160), (135, 161), (135, 162), (135, 163), (135, 164), (135, 165), (135, 166), (135, 167), (135, 168), (135, 169), (135, 170), (135, 171), (135, 172), (135, 173), (135, 174), (135, 175), (135, 176), (135, 177), (135, 178), (135, 179), (135, 180), (135, 181), (135, 182), (135, 183), (135, 184), (135, 185), (135, 186), (135, 187), (135, 188), (135, 190), (136, 157), (136, 159), (136, 160), (136, 161), (136, 162), (136, 163), (136, 164), (136, 165), (136, 166), (136, 167), (136, 168), (136, 169), (136, 170), (136, 171), (136, 172), (136, 173), (136, 174), (136, 175), (136, 176), (136, 177), (136, 178), (136, 179), (136, 180),
(136, 181), (136, 182), (136, 183), (136, 184), (136, 185), (136, 186), (136, 187), (136, 188), (136, 190), (137, 156), (137, 158), (137, 159), (137, 160), (137, 161), (137, 162), (137, 163), (137, 164), (137, 165), (137, 166), (137, 167), (137, 168), (137, 169), (137, 170), (137, 171), (137, 172), (137, 173), (137, 174), (137, 175), (137, 176), (137, 177), (137, 178), (137, 179), (137, 180), (137, 181), (137, 182), (137, 183), (137, 184), (137, 185), (137, 186), (137, 187), (137, 188), (137, 190), (138, 156), (138, 158), (138, 159), (138, 160), (138, 161), (138, 162), (138, 163), (138, 164), (138, 165), (138, 166), (138, 167), (138, 168), (138, 169), (138, 170), (138, 171), (138, 172), (138, 173), (138, 174), (138, 175), (138, 176), (138, 177), (138, 178), (138, 179), (138, 180), (138, 181), (138, 182), (138, 183), (138, 184), (138, 185), (138, 186),
(138, 187), (138, 188), (138, 189), (138, 190), (138, 191), (139, 155), (139, 157), (139, 158), (139, 159), (139, 160), (139, 161), (139, 162), (139, 163), (139, 164), (139, 165), (139, 166), (139, 167), (139, 168), (139, 169), (139, 170), (139, 171), (139, 172), (139, 173), (139, 174), (139, 175), (139, 176), (139, 177), (139, 178), (139, 179), (139, 180), (139, 181), (139, 182), (139, 183), (139, 184), (139, 185), (139, 186), (139, 187), (139, 188), (139, 189), (139, 191), (140, 154), (140, 156), (140, 157), (140, 158), (140, 159), (140, 160), (140, 161), (140, 162), (140, 163), (140, 164), (140, 165), (140, 166), (140, 167), (140, 168), (140, 169), (140, 170), (140, 171), (140, 172), (140, 173), (140, 174), (140, 175), (140, 176), (140, 177), (140, 178), (140, 179), (140, 180), (140, 181), (140, 182), (140, 183), (140, 184), (140, 185), (140, 186),
(140, 187), (140, 188), (140, 189), (140, 191), (141, 153), (141, 155), (141, 156), (141, 157), (141, 158), (141, 159), (141, 160), (141, 161), (141, 162), (141, 163), (141, 164), (141, 165), (141, 166), (141, 167), (141, 168), (141, 169), (141, 170), (141, 171), (141, 172), (141, 173), (141, 174), (141, 175), (141, 176), (141, 177), (141, 178), (141, 179), (141, 180), (141, 181), (141, 182), (141, 183), (141, 184), (141, 185), (141, 186), (141, 187), (141, 188), (141, 189), (141, 191), (142, 152), (142, 155), (142, 156), (142, 157), (142, 158), (142, 159), (142, 160), (142, 161), (142, 162), (142, 163), (142, 164), (142, 165), (142, 166), (142, 167), (142, 168), (142, 169), (142, 170), (142, 171), (142, 172), (142, 173), (142, 174), (142, 175), (142, 176), (142, 177), (142, 178), (142, 179), (142, 180), (142, 181), (142, 182), (142, 183), (142, 184),
(142, 185), (142, 186), (142, 187), (142, 188), (142, 189), (142, 190), (142, 192), (143, 153), (143, 156), (143, 157), (143, 158), (143, 159), (143, 160), (143, 161), (143, 162), (143, 163), (143, 164), (143, 165), (143, 166), (143, 167), (143, 168), (143, 169), (143, 170), (143, 171), (143, 172), (143, 173), (143, 174), (143, 175), (143, 176), (143, 177), (143, 178), (143, 179), (143, 180), (143, 181), (143, 182), (143, 183), (143, 184), (143, 185), (143, 186), (143, 187), (143, 188), (143, 189), (143, 190), (143, 192), (144, 155), (144, 157), (144, 158), (144, 159), (144, 160), (144, 161), (144, 162), (144, 163), (144, 164), (144, 165), (144, 166), (144, 167), (144, 168), (144, 169), (144, 170), (144, 171), (144, 172), (144, 173), (144, 174), (144, 175), (144, 176), (144, 177), (144, 178), (144, 179), (144, 180), (144, 181), (144, 182), (144, 183),
(144, 184), (144, 185), (144, 186), (144, 187), (144, 188), (144, 189), (144, 190), (144, 191), (144, 193), (145, 156), (145, 158), (145, 159), (145, 160), (145, 161), (145, 162), (145, 163), (145, 164), (145, 165), (145, 166), (145, 167), (145, 168), (145, 169), (145, 170), (145, 171), (145, 172), (145, 173), (145, 174), (145, 175), (145, 176), (145, 177), (145, 178), (145, 179), (145, 180), (145, 181), (145, 182), (145, 183), (145, 184), (145, 185), (145, 186), (145, 187), (145, 188), (145, 189), (145, 190), (145, 191), (145, 193), (146, 157), (146, 159), (146, 160), (146, 161), (146, 162), (146, 163), (146, 164), (146, 165), (146, 166), (146, 167), (146, 168), (146, 169), (146, 170), (146, 171), (146, 172), (146, 173), (146, 174), (146, 175), (146, 176), (146, 177), (146, 178), (146, 179), (146, 180), (146, 181), (146, 182), (146, 183), (146, 184),
(146, 185), (146, 186), (146, 187), (146, 188), (146, 189), (146, 190), (146, 191), (146, 193), (147, 158), (147, 160), (147, 161), (147, 162), (147, 163), (147, 164), (147, 165), (147, 166), (147, 167), (147, 168), (147, 169), (147, 170), (147, 171), (147, 172), (147, 173), (147, 174), (147, 175), (147, 176), (147, 177), (147, 178), (147, 179), (147, 180), (147, 181), (147, 182), (147, 183), (147, 184), (147, 185), (147, 186), (147, 187), (147, 188), (147, 189), (147, 190), (147, 191), (147, 192), (147, 194), (148, 159), (148, 161), (148, 162), (148, 163), (148, 164), (148, 165), (148, 166), (148, 167), (148, 168), (148, 169), (148, 170), (148, 171), (148, 172), (148, 173), (148, 174), (148, 175), (148, 176), (148, 177), (148, 178), (148, 179), (148, 180), (148, 181), (148, 182), (148, 183), (148, 184), (148, 185), (148, 186), (148, 187), (148, 188),
(148, 189), (148, 190), (148, 191), (148, 192), (148, 194), (149, 160), (149, 162), (149, 163), (149, 164), (149, 165), (149, 166), (149, 167), (149, 168), (149, 169), (149, 170), (149, 171), (149, 172), (149, 173), (149, 174), (149, 175), (149, 176), (149, 177), (149, 178), (149, 179), (149, 180), (149, 181), (149, 182), (149, 183), (149, 184), (149, 185), (149, 186), (149, 187), (149, 188), (149, 189), (149, 190), (149, 191), (149, 192), (149, 193), (149, 195), (150, 160), (150, 162), (150, 163), (150, 164), (150, 165), (150, 166), (150, 167), (150, 168), (150, 169), (150, 170), (150, 171), (150, 172), (150, 173), (150, 174), (150, 175), (150, 176), (150, 177), (150, 178), (150, 179), (150, 180), (150, 181), (150, 182), (150, 183), (150, 184), (150, 185), (150, 186), (150, 187), (150, 188), (150, 189), (150, 190), (150, 191), (150, 192), (150, 193),
(150, 195), (151, 161), (151, 163), (151, 164), (151, 165), (151, 166), (151, 167), (151, 168), (151, 169), (151, 170), (151, 171), (151, 172), (151, 173), (151, 174), (151, 175), (151, 176), (151, 177), (151, 178), (151, 179), (151, 180), (151, 181), (151, 182), (151, 183), (151, 184), (151, 185), (151, 186), (151, 187), (151, 188), (151, 189), (151, 190), (151, 191), (151, 192), (151, 193), (151, 194), (151, 196), (152, 162), (152, 164), (152, 165), (152, 166), (152, 167), (152, 168), (152, 169), (152, 170), (152, 171), (152, 172), (152, 173), (152, 174), (152, 175), (152, 176), (152, 177), (152, 178), (152, 179), (152, 180), (152, 181), (152, 182), (152, 183), (152, 184), (152, 185), (152, 186), (152, 187), (152, 188), (152, 189), (152, 190), (152, 191), (152, 192), (152, 193), (152, 194), (152, 196), (153, 162), (153, 164), (153, 165), (153, 166),
(153, 167), (153, 168), (153, 169), (153, 170), (153, 171), (153, 172), (153, 173), (153, 174), (153, 175), (153, 176), (153, 177), (153, 178), (153, 179), (153, 180), (153, 181), (153, 182), (153, 183), (153, 184), (153, 185), (153, 186), (153, 187), (153, 188), (153, 189), (153, 190), (153, 191), (153, 192), (153, 193), (153, 194), (153, 196), (154, 163), (154, 165), (154, 166), (154, 167), (154, 168), (154, 169), (154, 170), (154, 171), (154, 172), (154, 173), (154, 174), (154, 175), (154, 176), (154, 177), (154, 178), (154, 179), (154, 180), (154, 181), (154, 182), (154, 183), (154, 184), (154, 185), (154, 186), (154, 187), (154, 188), (154, 189), (154, 190), (154, 191), (154, 192), (154, 193), (154, 194), (154, 196), (155, 163), (155, 165), (155, 166), (155, 167), (155, 168), (155, 169), (155, 170), (155, 171), (155, 172), (155, 173), (155, 174),
(155, 175), (155, 176), (155, 177), (155, 178), (155, 179), (155, 180), (155, 181), (155, 182), (155, 183), (155, 184), (155, 185), (155, 186), (155, 187), (155, 188), (155, 189), (155, 190), (155, 191), (155, 192), (155, 193), (155, 194), (155, 196), (156, 164), (156, 166), (156, 167), (156, 168), (156, 169), (156, 170), (156, 171), (156, 172), (156, 173), (156, 174), (156, 175), (156, 176), (156, 177), (156, 178), (156, 179), (156, 180), (156, 181), (156, 182), (156, 183), (156, 184), (156, 185), (156, 186), (156, 187), (156, 188), (156, 189), (156, 190), (156, 191), (156, 192), (156, 193), (156, 194), (156, 196), (157, 164), (157, 166), (157, 167), (157, 168), (157, 169), (157, 170), (157, 171), (157, 172), (157, 173), (157, 174), (157, 175), (157, 176), (157, 177), (157, 178), (157, 179), (157, 180), (157, 181), (157, 182), (157, 183), (157, 184),
(157, 185), (157, 186), (157, 187), (157, 188), (157, 189), (157, 190), (157, 191), (157, 192), (157, 193), (157, 194), (157, 196), (158, 165), (158, 167), (158, 168), (158, 169), (158, 170), (158, 171), (158, 172), (158, 173), (158, 174), (158, 175), (158, 176), (158, 177), (158, 178), (158, 179), (158, 180), (158, 181), (158, 182), (158, 183), (158, 184), (158, 185), (158, 186), (158, 187), (158, 188), (158, 189), (158, 190), (158, 191), (158, 192), (158, 193), (158, 194), (158, 196), (159, 165), (159, 167), (159, 168), (159, 169), (159, 170), (159, 171), (159, 172), (159, 173), (159, 174), (159, 175), (159, 176), (159, 177), (159, 178), (159, 179), (159, 180), (159, 181), (159, 182), (159, 183), (159, 184), (159, 185), (159, 186), (159, 187), (159, 188), (159, 189), (159, 190), (159, 191), (159, 192), (159, 193), (159, 194), (159, 196), (160, 166),
(160, 168), (160, 169), (160, 170), (160, 171), (160, 172), (160, 173), (160, 174), (160, 175), (160, 176), (160, 177), (160, 178), (160, 179), (160, 180), (160, 181), (160, 182), (160, 183), (160, 184), (160, 185), (160, 186), (160, 187), (160, 188), (160, 189), (160, 190), (160, 191), (160, 192), (160, 193), (160, 194), (160, 196), (161, 166), (161, 168), (161, 169), (161, 170), (161, 171), (161, 172), (161, 173), (161, 174), (161, 175), (161, 176), (161, 177), (161, 178), (161, 179), (161, 180), (161, 181), (161, 182), (161, 183), (161, 184), (161, 185), (161, 186), (161, 187), (161, 188), (161, 189), (161, 190), (161, 191), (161, 192), (161, 193), (161, 194), (161, 196), (162, 167), (162, 169), (162, 170), (162, 171), (162, 172), (162, 173), (162, 174), (162, 175), (162, 176), (162, 177), (162, 178), (162, 179), (162, 180), (162, 181), (162, 182),
(162, 183), (162, 184), (162, 185), (162, 186), (162, 187), (162, 188), (162, 189), (162, 190), (162, 191), (162, 192), (162, 193), (162, 194), (162, 196), (163, 167), (163, 169), (163, 170), (163, 171), (163, 172), (163, 173), (163, 174), (163, 175), (163, 176), (163, 177), (163, 178), (163, 179), (163, 180), (163, 181), (163, 182), (163, 183), (163, 184), (163, 185), (163, 186), (163, 187), (163, 188), (163, 189), (163, 190), (163, 191), (163, 192), (163, 193), (163, 194), (163, 196), (164, 168), (164, 170), (164, 171), (164, 172), (164, 173), (164, 174), (164, 175), (164, 176), (164, 177), (164, 178), (164, 179), (164, 180), (164, 181), (164, 182), (164, 183), (164, 184), (164, 185), (164, 186), (164, 187), (164, 188), (164, 189), (164, 190), (164, 191), (164, 192), (164, 193), (164, 194), (164, 196), (165, 168), (165, 170), (165, 171), (165, 172),
(165, 173), (165, 174), (165, 175), (165, 176), (165, 177), (165, 178), (165, 179), (165, 180), (165, 181), (165, 182), (165, 183), (165, 184), (165, 185), (165, 186), (165, 187), (165, 188), (165, 189), (165, 190), (165, 191), (165, 192), (165, 193), (165, 194), (165, 196), (166, 169), (166, 170), (166, 171), (166, 172), (166, 173), (166, 174), (166, 175), (166, 176), (166, 177), (166, 178), (166, 179), (166, 180), (166, 181), (166, 182), (166, 183), (166, 184), (166, 185), (166, 186), (166, 187), (166, 188), (166, 189), (166, 190), (166, 191), (166, 192), (166, 193), (166, 194), (166, 196), (167, 169), (167, 171), (167, 172), (167, 173), (167, 174), (167, 175), (167, 176), (167, 177), (167, 178), (167, 179), (167, 180), (167, 181), (167, 182), (167, 183), (167, 184), (167, 185), (167, 186), (167, 187), (167, 188), (167, 189), (167, 190), (167, 191),
(167, 192), (167, 193), (167, 194), (167, 196), (168, 169), (168, 171), (168, 172), (168, 173), (168, 174), (168, 175), (168, 176), (168, 177), (168, 178), (168, 179), (168, 180), (168, 181), (168, 182), (168, 183), (168, 184), (168, 185), (168, 186), (168, 187), (168, 188), (168, 189), (168, 190), (168, 191), (168, 192), (168, 193), (168, 194), (168, 196), (169, 170), (169, 172), (169, 173), (169, 174), (169, 175), (169, 176), (169, 177), (169, 178), (169, 179), (169, 180), (169, 181), (169, 182), (169, 183), (169, 184), (169, 185), (169, 186), (169, 187), (169, 188), (169, 189), (169, 190), (169, 191), (169, 192), (169, 193), (169, 194), (169, 196), (170, 171), (170, 172), (170, 173), (170, 174), (170, 175), (170, 176), (170, 177), (170, 178), (170, 179), (170, 180), (170, 181), (170, 182), (170, 183), (170, 184), (170, 185), (170, 186), (170, 187),
(170, 188), (170, 189), (170, 190), (170, 191), (170, 192), (170, 193), (170, 196), (171, 171), (171, 173), (171, 174), (171, 175), (171, 176), (171, 177), (171, 178), (171, 179), (171, 180), (171, 181), (171, 182), (171, 183), (171, 184), (171, 185), (171, 186), (171, 187), (171, 188), (171, 189), (171, 190), (171, 191), (171, 192), (171, 195), (172, 172), (172, 174), (172, 175), (172, 176), (172, 177), (172, 178), (172, 179), (172, 180), (172, 181), (172, 182), (172, 183), (172, 184), (172, 185), (172, 186), (172, 187), (172, 188), (172, 189), (172, 190), (172, 191), (173, 172), (173, 174), (173, 175), (173, 176), (173, 177), (173, 178), (173, 179), (173, 180), (173, 181), (173, 182), (173, 183), (173, 184), (173, 185), (173, 186), (173, 187), (173, 188), (173, 189), (173, 190), (174, 173), (174, 175), (174, 176), (174, 177), (174, 178), (174, 179),
(174, 180), (174, 181), (174, 182), (174, 183), (174, 184), (174, 185), (174, 186), (174, 187), (174, 191), (175, 176), (175, 177), (175, 178), (175, 179), (175, 180), (175, 181), (175, 182), (175, 183), (175, 184), (175, 188), (175, 190), (176, 174), (176, 177), (176, 178), (176, 179), (176, 180), (176, 181), (176, 185), (176, 186), (176, 187), (177, 175), (177, 182), (177, 184), (178, 177), (178, 179), (178, 180), (178, 181), (267, 176), (267, 178), (267, 179), (267, 180), (267, 181), (267, 182), (268, 175), (268, 184), (268, 185), (269, 175), (269, 177), (269, 178), (269, 179), (269, 180), (269, 181), (269, 182), (269, 183), (269, 186), (269, 187), (269, 188), (270, 175), (270, 177), (270, 178), (270, 179), (270, 180), (270, 181), (270, 182), (270, 183), (270, 184), (270, 185), (270, 190), (271, 174), (271, 176), (271, 177), (271, 178), (271, 179),
(271, 180), (271, 181), (271, 182), (271, 183), (271, 184), (271, 185), (271, 186), (271, 187), (271, 188), (271, 190), (272, 174), (272, 176), (272, 177), (272, 178), (272, 179), (272, 180), (272, 181), (272, 182), (272, 183), (272, 184), (272, 185), (272, 186), (272, 187), (272, 188), (272, 190), (273, 173), (273, 175), (273, 176), (273, 177), (273, 178), (273, 179), (273, 180), (273, 181), (273, 182), (273, 183), (273, 184), (273, 185), (273, 186), (273, 187), (273, 188), (273, 189), (273, 191), (274, 173), (274, 175), (274, 176), (274, 177), (274, 178), (274, 179), (274, 180), (274, 181), (274, 182), (274, 183), (274, 184), (274, 185), (274, 186), (274, 187), (274, 188), (274, 189), (274, 191), (275, 172), (275, 174), (275, 175), (275, 176), (275, 177), (275, 178), (275, 179), (275, 180), (275, 181), (275, 182), (275, 183), (275, 184), (275, 185),
(275, 186), (275, 187), (275, 188), (275, 189), (275, 190), (275, 192), (276, 172), (276, 174), (276, 175), (276, 176), (276, 177), (276, 178), (276, 179), (276, 180), (276, 181), (276, 182), (276, 183), (276, 184), (276, 185), (276, 186), (276, 187), (276, 188), (276, 189), (276, 190), (276, 191), (276, 193), (277, 171), (277, 173), (277, 174), (277, 175), (277, 176), (277, 177), (277, 178), (277, 179), (277, 180), (277, 181), (277, 182), (277, 183), (277, 184), (277, 185), (277, 186), (277, 187), (277, 188), (277, 189), (277, 190), (277, 191), (277, 193), (278, 170), (278, 172), (278, 173), (278, 174), (278, 175), (278, 176), (278, 177), (278, 178), (278, 179), (278, 180), (278, 181), (278, 182), (278, 183), (278, 184), (278, 185), (278, 186), (278, 187), (278, 188), (278, 189), (278, 190), (278, 191), (278, 193), (279, 170), (279, 172), (279, 173),
(279, 174), (279, 175), (279, 176), (279, 177), (279, 178), (279, 179), (279, 180), (279, 181), (279, 182), (279, 183), (279, 184), (279, 185), (279, 186), (279, 187), (279, 188), (279, 189), (279, 190), (279, 191), (279, 193), (280, 169), (280, 171), (280, 172), (280, 173), (280, 174), (280, 175), (280, 176), (280, 177), (280, 178), (280, 179), (280, 180), (280, 181), (280, 182), (280, 183), (280, 184), (280, 185), (280, 186), (280, 187), (280, 188), (280, 189), (280, 190), (280, 191), (280, 193), (281, 169), (281, 171), (281, 172), (281, 173), (281, 174), (281, 175), (281, 176), (281, 177), (281, 178), (281, 179), (281, 180), (281, 181), (281, 182), (281, 183), (281, 184), (281, 185), (281, 186), (281, 187), (281, 188), (281, 189), (281, 190), (281, 191), (281, 193), (282, 168), (282, 170), (282, 171), (282, 172), (282, 173), (282, 174), (282, 175),
(282, 176), (282, 177), (282, 178), (282, 179), (282, 180), (282, 181), (282, 182), (282, 183), (282, 184), (282, 185), (282, 186), (282, 187), (282, 188), (282, 189), (282, 190), (282, 191), (282, 193), (283, 168), (283, 170), (283, 171), (283, 172), (283, 173), (283, 174), (283, 175), (283, 176), (283, 177), (283, 178), (283, 179), (283, 180), (283, 181), (283, 182), (283, 183), (283, 184), (283, 185), (283, 186), (283, 187), (283, 188), (283, 189), (283, 190), (283, 191), (283, 193), (284, 167), (284, 169), (284, 170), (284, 171), (284, 172), (284, 173), (284, 174), (284, 175), (284, 176), (284, 177), (284, 178), (284, 179), (284, 180), (284, 181), (284, 182), (284, 183), (284, 184), (284, 185), (284, 186), (284, 187), (284, 188), (284, 189), (284, 190), (284, 191), (284, 193), (285, 166), (285, 168), (285, 169), (285, 170), (285, 171), (285, 172),
(285, 173), (285, 174), (285, 175), (285, 176), (285, 177), (285, 178), (285, 179), (285, 180), (285, 181), (285, 182), (285, 183), (285, 184), (285, 185), (285, 186), (285, 187), (285, 188), (285, 189), (285, 190), (285, 191), (285, 193), (286, 166), (286, 168), (286, 169), (286, 170), (286, 171), (286, 172), (286, 173), (286, 174), (286, 175), (286, 176), (286, 177), (286, 178), (286, 179), (286, 180), (286, 181), (286, 182), (286, 183), (286, 184), (286, 185), (286, 186), (286, 187), (286, 188), (286, 189), (286, 190), (286, 191), (286, 193), (287, 165), (287, 167), (287, 168), (287, 169), (287, 170), (287, 171), (287, 172), (287, 173), (287, 174), (287, 175), (287, 176), (287, 177), (287, 178), (287, 179), (287, 180), (287, 181), (287, 182), (287, 183), (287, 184), (287, 185), (287, 186), (287, 187), (287, 188), (287, 189), (287, 190), (287, 191),
(287, 193), (288, 164), (288, 166), (288, 167), (288, 168), (288, 169), (288, 170), (288, 171), (288, 172), (288, 173), (288, 174), (288, 175), (288, 176), (288, 177), (288, 178), (288, 179), (288, 180), (288, 181), (288, 182), (288, 183), (288, 184), (288, 185), (288, 186), (288, 187), (288, 188), (288, 189), (288, 190), (288, 191), (288, 193), (289, 164), (289, 166), (289, 167), (289, 168), (289, 169), (289, 170), (289, 171), (289, 172), (289, 173), (289, 174), (289, 175), (289, 176), (289, 177), (289, 178), (289, 179), (289, 180), (289, 181), (289, 182), (289, 183), (289, 184), (289, 185), (289, 186), (289, 187), (289, 188), (289, 189), (289, 190), (289, 191), (289, 193), (290, 163), (290, 165), (290, 166), (290, 167), (290, 168), (290, 169), (290, 170), (290, 171), (290, 172), (290, 173), (290, 174), (290, 175), (290, 176), (290, 177), (290, 178),
(290, 179), (290, 180), (290, 181), (290, 182), (290, 183), (290, 184), (290, 185), (290, 186), (290, 187), (290, 188), (290, 189), (290, 190), (290, 191), (290, 193), (291, 164), (291, 165), (291, 166), (291, 167), (291, 168), (291, 169), (291, 170), (291, 171), (291, 172), (291, 173), (291, 174), (291, 175), (291, 176), (291, 177), (291, 178), (291, 179), (291, 180), (291, 181), (291, 182), (291, 183), (291, 184), (291, 185), (291, 186), (291, 187), (291, 188), (291, 189), (291, 190), (291, 191), (291, 193), (292, 162), (292, 164), (292, 165), (292, 166), (292, 167), (292, 168), (292, 169), (292, 170), (292, 171), (292, 172), (292, 173), (292, 174), (292, 175), (292, 176), (292, 177), (292, 178), (292, 179), (292, 180), (292, 181), (292, 182), (292, 183), (292, 184), (292, 185), (292, 186), (292, 187), (292, 188), (292, 189), (292, 190), (292, 191),
(292, 193), (293, 161), (293, 163), (293, 164), (293, 165), (293, 166), (293, 167), (293, 168), (293, 169), (293, 170), (293, 171), (293, 172), (293, 173), (293, 174), (293, 175), (293, 176), (293, 177), (293, 178), (293, 179), (293, 180), (293, 181), (293, 182), (293, 183), (293, 184), (293, 185), (293, 186), (293, 187), (293, 188), (293, 189), (293, 190), (293, 191), (293, 193), (294, 160), (294, 162), (294, 163), (294, 164), (294, 165), (294, 166), (294, 167), (294, 168), (294, 169), (294, 170), (294, 171), (294, 172), (294, 173), (294, 174), (294, 175), (294, 176), (294, 177), (294, 178), (294, 179), (294, 180), (294, 181), (294, 182), (294, 183), (294, 184), (294, 185), (294, 186), (294, 187), (294, 188), (294, 189), (294, 190), (294, 191), (294, 193), (295, 160), (295, 162), (295, 163), (295, 164), (295, 165), (295, 166), (295, 167), (295, 168),
(295, 169), (295, 170), (295, 171), (295, 172), (295, 173), (295, 174), (295, 175), (295, 176), (295, 177), (295, 178), (295, 179), (295, 180), (295, 181), (295, 182), (295, 183), (295, 184), (295, 185), (295, 186), (295, 187), (295, 188), (295, 189), (295, 190), (295, 191), (295, 193), (296, 159), (296, 161), (296, 162), (296, 163), (296, 164), (296, 165), (296, 166), (296, 167), (296, 168), (296, 169), (296, 170), (296, 171), (296, 172), (296, 173), (296, 174), (296, 175), (296, 176), (296, 177), (296, 178), (296, 179), (296, 180), (296, 181), (296, 182), (296, 183), (296, 184), (296, 185), (296, 186), (296, 187), (296, 188), (296, 189), (296, 190), (296, 191), (296, 193), (297, 158), (297, 160), (297, 161), (297, 162), (297, 163), (297, 164), (297, 165), (297, 166), (297, 167), (297, 168), (297, 169), (297, 170), (297, 171), (297, 172), (297, 173),
(297, 174), (297, 175), (297, 176), (297, 177), (297, 178), (297, 179), (297, 180), (297, 181), (297, 182), (297, 183), (297, 184), (297, 185), (297, 186), (297, 187), (297, 188), (297, 189), (297, 190), (297, 191), (297, 193), (298, 158), (298, 160), (298, 161), (298, 162), (298, 163), (298, 164), (298, 165), (298, 166), (298, 167), (298, 168), (298, 169), (298, 170), (298, 171), (298, 172), (298, 173), (298, 174), (298, 175), (298, 176), (298, 177), (298, 178), (298, 179), (298, 180), (298, 181), (298, 182), (298, 183), (298, 184), (298, 185), (298, 186), (298, 187), (298, 188), (298, 189), (298, 190), (298, 191), (298, 193), (299, 157), (299, 159), (299, 160), (299, 161), (299, 162), (299, 163), (299, 164), (299, 165), (299, 166), (299, 167), (299, 168), (299, 169), (299, 170), (299, 171), (299, 172), (299, 173), (299, 174), (299, 175), (299, 176),
(299, 177), (299, 178), (299, 179), (299, 180), (299, 181), (299, 182), (299, 183), (299, 184), (299, 185), (299, 186), (299, 187), (299, 188), (299, 189), (299, 190), (299, 191), (299, 193), (300, 156), (300, 158), (300, 159), (300, 160), (300, 161), (300, 162), (300, 163), (300, 164), (300, 165), (300, 166), (300, 167), (300, 168), (300, 169), (300, 170), (300, 171), (300, 172), (300, 173), (300, 174), (300, 175), (300, 176), (300, 177), (300, 178), (300, 179), (300, 180), (300, 181), (300, 182), (300, 183), (300, 184), (300, 185), (300, 186), (300, 187), (300, 188), (300, 189), (300, 190), (300, 191), (300, 193), (301, 155), (301, 157), (301, 158), (301, 159), (301, 160), (301, 161), (301, 162), (301, 163), (301, 164), (301, 165), (301, 166), (301, 167), (301, 168), (301, 169), (301, 170), (301, 171), (301, 172), (301, 173), (301, 174), (301, 175),
(301, 176), (301, 177), (301, 178), (301, 179), (301, 180), (301, 181), (301, 182), (301, 183), (301, 184), (301, 185), (301, 186), (301, 187), (301, 188), (301, 189), (301, 190), (301, 191), (301, 193), (302, 154), (302, 156), (302, 157), (302, 158), (302, 159), (302, 160), (302, 161), (302, 162), (302, 163), (302, 164), (302, 165), (302, 166), (302, 167), (302, 168), (302, 169), (302, 170), (302, 171), (302, 172), (302, 173), (302, 174), (302, 175), (302, 176), (302, 177), (302, 178), (302, 179), (302, 180), (302, 181), (302, 182), (302, 183), (302, 184), (302, 185), (302, 186), (302, 187), (302, 188), (302, 189), (302, 190), (302, 191), (302, 193), (303, 153), (303, 156), (303, 157), (303, 158), (303, 159), (303, 160), (303, 161), (303, 162), (303, 163), (303, 164), (303, 165), (303, 166), (303, 167), (303, 168), (303, 169), (303, 170), (303, 171),
(303, 172), (303, 173), (303, 174), (303, 175), (303, 176), (303, 177), (303, 178), (303, 179), (303, 180), (303, 181), (303, 182), (303, 183), (303, 184), (303, 185), (303, 186), (303, 187), (303, 188), (303, 189), (303, 190), (303, 191), (303, 193), (304, 152), (304, 155), (304, 156), (304, 157), (304, 158), (304, 159), (304, 160), (304, 161), (304, 162), (304, 163), (304, 164), (304, 165), (304, 166), (304, 167), (304, 168), (304, 169), (304, 170), (304, 171), (304, 172), (304, 173), (304, 174), (304, 175), (304, 176), (304, 177), (304, 178), (304, 179), (304, 180), (304, 181), (304, 182), (304, 183), (304, 184), (304, 185), (304, 186), (304, 187), (304, 188), (304, 189), (304, 190), (304, 191), (304, 193), (305, 151), (305, 154), (305, 155), (305, 156), (305, 157), (305, 158), (305, 159), (305, 160), (305, 161), (305, 162), (305, 163), (305, 164),
(305, 165), (305, 166), (305, 167), (305, 168), (305, 169), (305, 170), (305, 171), (305, 172), (305, 173), (305, 174), (305, 175), (305, 176), (305, 177), (305, 178), (305, 179), (305, 180), (305, 181), (305, 182), (305, 183), (305, 184), (305, 185), (305, 186), (305, 187), (305, 188), (305, 189), (305, 190), (305, 191), (305, 193), (306, 152), (306, 154), (306, 155), (306, 156), (306, 157), (306, 158), (306, 159), (306, 160), (306, 161), (306, 162), (306, 163), (306, 164), (306, 165), (306, 166), (306, 167), (306, 168), (306, 169), (306, 170), (306, 171), (306, 172), (306, 173), (306, 174), (306, 175), (306, 176), (306, 177), (306, 178), (306, 179), (306, 180), (306, 181), (306, 182), (306, 183), (306, 184), (306, 185), (306, 186), (306, 187), (306, 188), (306, 189), (306, 190), (306, 193), (307, 152), (307, 154), (307, 155), (307, 156), (307, 157),
(307, 158), (307, 159), (307, 160), (307, 161), (307, 162), (307, 163), (307, 164), (307, 165), (307, 166), (307, 167), (307, 168), (307, 169), (307, 170), (307, 171), (307, 172), (307, 173), (307, 174), (307, 175), (307, 176), (307, 177), (307, 178), (307, 179), (307, 180), (307, 181), (307, 182), (307, 183), (307, 184), (307, 185), (307, 186), (307, 187), (307, 188), (307, 189), (307, 190), (307, 191), (308, 153), (308, 155), (308, 156), (308, 157), (308, 158), (308, 159), (308, 160), (308, 161), (308, 162), (308, 163), (308, 164), (308, 165), (308, 166), (308, 167), (308, 168), (308, 169), (308, 170), (308, 171), (308, 172), (308, 173), (308, 174), (308, 175), (308, 176), (308, 177), (308, 178), (308, 179), (308, 180), (308, 181), (308, 182), (308, 183), (308, 184), (308, 185), (308, 186), (308, 187), (308, 188), (308, 190), (309, 154), (309, 156),
(309, 157), (309, 158), (309, 159), (309, 160), (309, 161), (309, 162), (309, 163), (309, 164), (309, 165), (309, 166), (309, 167), (309, 168), (309, 169), (309, 170), (309, 171), (309, 172), (309, 173), (309, 174), (309, 175), (309, 176), (309, 177), (309, 178), (309, 179), (309, 180), (309, 181), (309, 182), (309, 183), (309, 184), (309, 185), (309, 186), (309, 187), (309, 188), (309, 190), (310, 155), (310, 157), (310, 158), (310, 159), (310, 160), (310, 161), (310, 162), (310, 163), (310, 164), (310, 165), (310, 166), (310, 167), (310, 168), (310, 169), (310, 170), (310, 171), (310, 172), (310, 173), (310, 174), (310, 175), (310, 176), (310, 177), (310, 178), (310, 179), (310, 180), (310, 181), (310, 182), (310, 183), (310, 184), (310, 185), (310, 186), (310, 187), (310, 189), (311, 156), (311, 158), (311, 159), (311, 160), (311, 161), (311, 162),
(311, 163), (311, 164), (311, 165), (311, 166), (311, 167), (311, 168), (311, 169), (311, 170), (311, 171), (311, 172), (311, 173), (311, 174), (311, 175), (311, 176), (311, 177), (311, 178), (311, 179), (311, 180), (311, 181), (311, 182), (311, 183), (311, 184), (311, 185), (311, 186), (311, 187), (311, 189), (312, 157), (312, 159), (312, 160), (312, 161), (312, 162), (312, 163), (312, 164), (312, 165), (312, 166), (312, 167), (312, 168), (312, 169), (312, 170), (312, 171), (312, 172), (312, 173), (312, 174), (312, 175), (312, 176), (312, 177), (312, 178), (312, 179), (312, 180), (312, 181), (312, 182), (312, 183), (312, 184), (312, 185), (312, 186), (312, 189), (313, 158), (313, 161), (313, 162), (313, 163), (313, 164), (313, 165), (313, 166), (313, 167), (313, 168), (313, 169), (313, 170), (313, 171), (313, 172), (313, 173), (313, 174), (313, 175),
(313, 176), (313, 177), (313, 178), (313, 179), (313, 180), (313, 181), (313, 182), (313, 183), (313, 184), (313, 185), (313, 188), (314, 159), (314, 162), (314, 163), (314, 164), (314, 165), (314, 166), (314, 167), (314, 168), (314, 169), (314, 170), (314, 171), (314, 172), (314, 173), (314, 174), (314, 175), (314, 176), (314, 177), (314, 178), (314, 179), (314, 180), (314, 181), (314, 182), (314, 183), (314, 184), (314, 187), (315, 161), (315, 164), (315, 165), (315, 166), (315, 167), (315, 168), (315, 169), (315, 170), (315, 171), (315, 172), (315, 173), (315, 174), (315, 175), (315, 176), (315, 177), (315, 178), (315, 179), (315, 180), (315, 181), (315, 182), (315, 185), (316, 162), (316, 165), (316, 166), (316, 167), (316, 168), (316, 169), (316, 170), (316, 171), (316, 172), (316, 173), (316, 174), (316, 175), (316, 176), (316, 177), (316, 178),
(316, 179), (316, 180), (316, 181), (316, 184), (317, 164), (317, 167), (317, 168), (317, 169), (317, 170), (317, 171), (317, 172), (317, 173), (317, 174), (317, 175), (317, 176), (317, 177), (317, 178), (317, 179), (317, 180), (317, 182), (318, 165), (318, 169), (318, 170), (318, 171), (318, 172), (318, 173), (318, 174), (318, 175), (318, 176), (318, 177), (318, 178), (318, 181), (319, 167), (319, 171), (319, 172), (319, 173), (319, 174), (319, 175), (319, 176), (319, 177), (319, 180), (320, 169), (320, 178), (321, 171), (321, 173), (321, 174), (321, 175), (321, 177), )
coordinates_CC99FF = ((129, 43),
(130, 41), (130, 44), (131, 43), (131, 45), (132, 39), (132, 41), (132, 42), (132, 43), (132, 44), (133, 38), (133, 40), (133, 41), (133, 42), (133, 43), (133, 44), (133, 45), (133, 48), (134, 38), (134, 40), (134, 41), (134, 42), (134, 43), (134, 44), (134, 45), (134, 46), (134, 50), (135, 38), (135, 39), (135, 40), (135, 41), (135, 42), (135, 43), (135, 44), (135, 45), (135, 46), (135, 47), (135, 48), (135, 51), (135, 52), (136, 37), (136, 39), (136, 40), (136, 41), (136, 42), (136, 43), (136, 44), (136, 45), (136, 46), (136, 47), (136, 48), (136, 49), (136, 50), (136, 53), (136, 54), (137, 37), (137, 39), (137, 40), (137, 41), (137, 42), (137, 43), (137, 44), (137, 45), (137, 46), (137, 47), (137, 48), (137, 49), (137, 50), (137, 51), (137, 52), (137, 55), (137, 56), (138, 37),
(138, 39), (138, 40), (138, 41), (138, 42), (138, 43), (138, 44), (138, 45), (138, 46), (138, 47), (138, 48), (138, 49), (138, 50), (138, 51), (138, 52), (138, 53), (138, 54), (138, 57), (138, 59), (139, 37), (139, 39), (139, 40), (139, 41), (139, 42), (139, 43), (139, 44), (139, 45), (139, 46), (139, 47), (139, 48), (139, 49), (139, 50), (139, 51), (139, 52), (139, 53), (139, 54), (139, 55), (139, 56), (139, 59), (140, 37), (140, 39), (140, 40), (140, 41), (140, 42), (140, 43), (140, 44), (140, 45), (140, 46), (140, 47), (140, 48), (140, 49), (140, 50), (140, 51), (140, 52), (140, 53), (140, 54), (140, 55), (140, 56), (140, 57), (140, 59), (141, 37), (141, 39), (141, 40), (141, 41), (141, 42), (141, 43), (141, 44), (141, 45), (141, 46), (141, 47), (141, 48), (141, 49), (141, 50),
(141, 51), (141, 52), (141, 53), (141, 54), (141, 55), (141, 56), (141, 57), (141, 59), (142, 37), (142, 39), (142, 40), (142, 41), (142, 42), (142, 43), (142, 44), (142, 45), (142, 46), (142, 47), (142, 48), (142, 49), (142, 50), (142, 51), (142, 52), (142, 53), (142, 54), (142, 55), (142, 56), (142, 57), (142, 58), (142, 60), (143, 36), (143, 37), (143, 38), (143, 39), (143, 40), (143, 41), (143, 42), (143, 43), (143, 44), (143, 45), (143, 46), (143, 47), (143, 48), (143, 49), (143, 50), (143, 51), (143, 52), (143, 53), (143, 54), (143, 55), (143, 56), (143, 57), (143, 58), (143, 59), (143, 61), (144, 36), (144, 37), (144, 38), (144, 39), (144, 40), (144, 41), (144, 42), (144, 43), (144, 44), (144, 45), (144, 46), (144, 47), (144, 48), (144, 49), (144, 50), (144, 51), (144, 52),
(144, 53), (144, 54), (144, 55), (144, 56), (144, 57), (144, 58), (144, 59), (144, 60), (144, 62), (145, 37), (145, 39), (145, 40), (145, 41), (145, 42), (145, 43), (145, 44), (145, 45), (145, 46), (145, 47), (145, 48), (145, 49), (145, 50), (145, 51), (145, 52), (145, 53), (145, 54), (145, 55), (145, 56), (145, 57), (145, 58), (145, 59), (145, 60), (145, 63), (146, 37), (146, 39), (146, 40), (146, 41), (146, 42), (146, 43), (146, 44), (146, 45), (146, 46), (146, 47), (146, 48), (146, 49), (146, 50), (146, 51), (146, 52), (146, 53), (146, 54), (146, 55), (146, 56), (146, 57), (146, 58), (146, 59), (146, 60), (146, 61), (146, 64), (147, 37), (147, 39), (147, 40), (147, 41), (147, 42), (147, 43), (147, 44), (147, 45), (147, 46), (147, 47), (147, 48), (147, 49), (147, 50), (147, 51),
(147, 52), (147, 53), (147, 54), (147, 55), (147, 56), (147, 57), (147, 58), (147, 59), (147, 60), (147, 61), (147, 62), (147, 65), (148, 37), (148, 39), (148, 40), (148, 41), (148, 42), (148, 43), (148, 44), (148, 45), (148, 46), (148, 47), (148, 48), (148, 49), (148, 50), (148, 51), (148, 52), (148, 53), (148, 54), (148, 55), (148, 56), (148, 57), (148, 58), (148, 59), (148, 60), (148, 61), (148, 62), (148, 63), (148, 64), (148, 66), (149, 37), (149, 39), (149, 40), (149, 41), (149, 42), (149, 43), (149, 44), (149, 45), (149, 46), (149, 47), (149, 48), (149, 49), (149, 50), (149, 51), (149, 52), (149, 53), (149, 54), (149, 55), (149, 56), (149, 57), (149, 58), (149, 59), (149, 60), (149, 61), (149, 62), (149, 63), (149, 64), (149, 65), (149, 67), (150, 37), (150, 39), (150, 40),
(150, 41), (150, 42), (150, 43), (150, 44), (150, 45), (150, 46), (150, 47), (150, 48), (150, 49), (150, 50), (150, 51), (150, 52), (150, 53), (150, 54), (150, 55), (150, 56), (150, 57), (150, 58), (150, 59), (150, 60), (150, 61), (150, 62), (150, 63), (150, 64), (150, 65), (150, 66), (150, 68), (151, 37), (151, 39), (151, 40), (151, 41), (151, 42), (151, 43), (151, 44), (151, 45), (151, 46), (151, 47), (151, 48), (151, 49), (151, 50), (151, 51), (151, 52), (151, 53), (151, 54), (151, 55), (151, 56), (151, 57), (151, 58), (151, 59), (151, 60), (151, 61), (151, 62), (151, 63), (151, 64), (151, 65), (151, 66), (151, 67), (151, 69), (152, 37), (152, 39), (152, 40), (152, 41), (152, 42), (152, 43), (152, 44), (152, 45), (152, 46), (152, 47), (152, 48), (152, 49), (152, 50), (152, 51),
(152, 52), (152, 53), (152, 54), (152, 55), (152, 56), (152, 57), (152, 58), (152, 59), (152, 60), (152, 61), (152, 62), (152, 63), (152, 64), (152, 65), (152, 66), (152, 67), (152, 68), (152, 70), (153, 37), (153, 39), (153, 40), (153, 41), (153, 42), (153, 43), (153, 44), (153, 45), (153, 46), (153, 47), (153, 48), (153, 49), (153, 50), (153, 51), (153, 52), (153, 53), (153, 54), (153, 55), (153, 56), (153, 57), (153, 58), (153, 59), (153, 60), (153, 61), (153, 62), (153, 63), (153, 64), (153, 65), (153, 66), (153, 67), (153, 68), (153, 69), (153, 71), (154, 37), (154, 39), (154, 40), (154, 41), (154, 42), (154, 43), (154, 44), (154, 45), (154, 46), (154, 47), (154, 48), (154, 49), (154, 50), (154, 51), (154, 52), (154, 53), (154, 54), (154, 55), (154, 56), (154, 57), (154, 58),
(154, 59), (154, 60), (154, 61), (154, 62), (154, 63), (154, 64), (154, 65), (154, 66), (154, 67), (154, 68), (154, 69), (154, 71), (155, 37), (155, 39), (155, 40), (155, 41), (155, 42), (155, 43), (155, 44), (155, 45), (155, 46), (155, 47), (155, 48), (155, 49), (155, 50), (155, 51), (155, 52), (155, 53), (155, 54), (155, 55), (155, 56), (155, 57), (155, 58), (155, 59), (155, 60), (155, 61), (155, 62), (155, 63), (155, 64), (155, 65), (155, 66), (155, 67), (155, 68), (155, 69), (155, 71), (156, 37), (156, 39), (156, 40), (156, 41), (156, 42), (156, 43), (156, 44), (156, 45), (156, 46), (156, 47), (156, 48), (156, 49), (156, 50), (156, 51), (156, 52), (156, 53), (156, 54), (156, 55), (156, 56), (156, 57), (156, 58), (156, 59), (156, 60), (156, 61), (156, 62), (156, 63), (156, 64),
(156, 65), (156, 66), (156, 67), (156, 68), (156, 69), (156, 71), (157, 38), (157, 40), (157, 41), (157, 42), (157, 43), (157, 44), (157, 45), (157, 46), (157, 47), (157, 48), (157, 49), (157, 50), (157, 51), (157, 52), (157, 53), (157, 54), (157, 55), (157, 56), (157, 57), (157, 58), (157, 59), (157, 60), (157, 61), (157, 62), (157, 63), (157, 64), (157, 65), (157, 66), (157, 67), (157, 68), (157, 69), (157, 71), (158, 38), (158, 40), (158, 41), (158, 42), (158, 43), (158, 44), (158, 45), (158, 46), (158, 47), (158, 48), (158, 49), (158, 50), (158, 51), (158, 52), (158, 53), (158, 54), (158, 55), (158, 56), (158, 57), (158, 58), (158, 59), (158, 60), (158, 61), (158, 62), (158, 63), (158, 64), (158, 65), (158, 66), (158, 67), (158, 68), (158, 69), (158, 71), (159, 38), (159, 40),
(159, 41), (159, 42), (159, 43), (159, 44), (159, 45), (159, 46), (159, 47), (159, 48), (159, 49), (159, 50), (159, 51), (159, 52), (159, 53), (159, 54), (159, 55), (159, 56), (159, 57), (159, 58), (159, 59), (159, 60), (159, 61), (159, 62), (159, 63), (159, 64), (159, 65), (159, 66), (159, 67), (159, 68), (159, 69), (159, 71), (160, 38), (160, 40), (160, 41), (160, 42), (160, 43), (160, 44), (160, 45), (160, 46), (160, 47), (160, 48), (160, 49), (160, 50), (160, 51), (160, 52), (160, 53), (160, 54), (160, 55), (160, 56), (160, 57), (160, 58), (160, 59), (160, 60), (160, 61), (160, 62), (160, 63), (160, 64), (160, 65), (160, 66), (160, 67), (160, 68), (160, 69), (160, 71), (161, 38), (161, 40), (161, 41), (161, 42), (161, 43), (161, 44), (161, 45), (161, 46), (161, 47), (161, 48),
(161, 49), (161, 50), (161, 51), (161, 52), (161, 53), (161, 54), (161, 55), (161, 56), (161, 57), (161, 58), (161, 59), (161, 60), (161, 61), (161, 62), (161, 63), (161, 64), (161, 65), (161, 66), (161, 67), (161, 68), (161, 69), (161, 71), (162, 38), (162, 40), (162, 41), (162, 42), (162, 43), (162, 44), (162, 45), (162, 46), (162, 47), (162, 48), (162, 49), (162, 50), (162, 51), (162, 52), (162, 53), (162, 54), (162, 55), (162, 56), (162, 57), (162, 58), (162, 59), (162, 60), (162, 61), (162, 62), (162, 63), (162, 64), (162, 65), (162, 66), (162, 67), (162, 68), (162, 69), (162, 71), (163, 38), (163, 39), (163, 40), (163, 41), (163, 42), (163, 43), (163, 44), (163, 45), (163, 46), (163, 47), (163, 48), (163, 49), (163, 50), (163, 51), (163, 52), (163, 53), (163, 54), (163, 55),
(163, 56), (163, 57), (163, 58), (163, 59), (163, 60), (163, 61), (163, 62), (163, 63), (163, 64), (163, 65), (163, 66), (163, 67), (163, 68), (163, 69), (163, 71), (164, 39), (164, 41), (164, 42), (164, 43), (164, 44), (164, 45), (164, 46), (164, 47), (164, 48), (164, 53), (164, 54), (164, 55), (164, 56), (164, 57), (164, 58), (164, 59), (164, 60), (164, 61), (164, 62), (164, 63), (164, 64), (164, 65), (164, 66), (164, 67), (164, 68), (164, 69), (164, 71), (165, 39), (165, 48), (165, 49), (165, 50), (165, 51), (165, 52), (165, 53), (165, 61), (165, 62), (165, 63), (165, 64), (165, 65), (165, 66), (165, 67), (165, 68), (165, 69), (165, 71), (166, 39), (166, 41), (166, 42), (166, 43), (166, 44), (166, 45), (166, 46), (166, 47), (166, 48), (166, 53), (166, 54), (166, 55), (166, 56),
(166, 57), (166, 58), (166, 59), (166, 60), (166, 63), (166, 64), (166, 65), (166, 66), (166, 71), (167, 61), (167, 64), (167, 68), (168, 63), (168, 66), (169, 64), (273, 71), (274, 68), (274, 71), (275, 64), (275, 66), (275, 69), (275, 71), (276, 63), (276, 68), (276, 69), (276, 71), (277, 61), (277, 64), (277, 65), (277, 66), (277, 67), (277, 68), (277, 69), (277, 71), (278, 58), (278, 59), (278, 63), (278, 64), (278, 65), (278, 66), (278, 67), (278, 68), (278, 69), (278, 71), (279, 54), (279, 55), (279, 56), (279, 61), (279, 62), (279, 63), (279, 64), (279, 65), (279, 66), (279, 67), (279, 68), (279, 69), (279, 71), (280, 49), (280, 50), (280, 51), (280, 52), (280, 53), (280, 57), (280, 58), (280, 59), (280, 60), (280, 61), (280, 62), (280, 63), (280, 64), (280, 65), (280, 66),
(280, 67), (280, 68), (280, 69), (280, 71), (281, 42), (281, 44), (281, 45), (281, 46), (281, 47), (281, 48), (281, 54), (281, 55), (281, 56), (281, 57), (281, 58), (281, 59), (281, 60), (281, 61), (281, 62), (281, 63), (281, 64), (281, 65), (281, 66), (281, 67), (281, 68), (281, 69), (281, 71), (282, 42), (282, 49), (282, 50), (282, 51), (282, 52), (282, 53), (282, 54), (282, 55), (282, 56), (282, 57), (282, 58), (282, 59), (282, 60), (282, 61), (282, 62), (282, 63), (282, 64), (282, 65), (282, 66), (282, 67), (282, 68), (282, 69), (282, 71), (283, 42), (283, 44), (283, 45), (283, 46), (283, 47), (283, 48), (283, 49), (283, 50), (283, 51), (283, 52), (283, 53), (283, 54), (283, 55), (283, 56), (283, 57), (283, 58), (283, 59), (283, 60), (283, 61), (283, 62), (283, 63), (283, 64),
(283, 65), (283, 66), (283, 67), (283, 68), (283, 69), (283, 71), (284, 41), (284, 43), (284, 44), (284, 45), (284, 46), (284, 47), (284, 48), (284, 49), (284, 50), (284, 51), (284, 52), (284, 53), (284, 54), (284, 55), (284, 56), (284, 57), (284, 58), (284, 59), (284, 60), (284, 61), (284, 62), (284, 63), (284, 64), (284, 65), (284, 66), (284, 67), (284, 68), (284, 70), (285, 40), (285, 42), (285, 43), (285, 44), (285, 45), (285, 46), (285, 47), (285, 48), (285, 49), (285, 50), (285, 51), (285, 52), (285, 53), (285, 54), (285, 55), (285, 56), (285, 57), (285, 58), (285, 59), (285, 60), (285, 61), (285, 62), (285, 63), (285, 64), (285, 65), (285, 66), (285, 67), (285, 68), (285, 70), (286, 40), (286, 42), (286, 43), (286, 44), (286, 45), (286, 46), (286, 47), (286, 48), (286, 49),
(286, 50), (286, 51), (286, 52), (286, 53), (286, 54), (286, 55), (286, 56), (286, 57), (286, 58), (286, 59), (286, 60), (286, 61), (286, 62), (286, 63), (286, 64), (286, 65), (286, 66), (286, 67), (286, 68), (286, 70), (287, 39), (287, 41), (287, 42), (287, 43), (287, 44), (287, 45), (287, 46), (287, 47), (287, 48), (287, 49), (287, 50), (287, 51), (287, 52), (287, 53), (287, 54), (287, 55), (287, 56), (287, 57), (287, 58), (287, 59), (287, 60), (287, 61), (287, 62), (287, 63), (287, 64), (287, 65), (287, 66), (287, 67), (287, 68), (287, 70), (288, 38), (288, 40), (288, 41), (288, 42), (288, 43), (288, 44), (288, 45), (288, 46), (288, 47), (288, 48), (288, 49), (288, 50), (288, 51), (288, 52), (288, 53), (288, 54), (288, 55), (288, 56), (288, 57), (288, 58), (288, 59), (288, 60),
(288, 61), (288, 62), (288, 63), (288, 64), (288, 65), (288, 66), (288, 67), (288, 68), (288, 70), (289, 37), (289, 39), (289, 40), (289, 41), (289, 42), (289, 43), (289, 44), (289, 45), (289, 46), (289, 47), (289, 48), (289, 49), (289, 50), (289, 51), (289, 52), (289, 53), (289, 54), (289, 55), (289, 56), (289, 57), (289, 58), (289, 59), (289, 60), (289, 61), (289, 62), (289, 63), (289, 64), (289, 65), (289, 66), (289, 67), (289, 69), (290, 36), (290, 38), (290, 39), (290, 40), (290, 41), (290, 42), (290, 43), (290, 44), (290, 45), (290, 46), (290, 47), (290, 48), (290, 49), (290, 50), (290, 51), (290, 52), (290, 53), (290, 54), (290, 55), (290, 56), (290, 57), (290, 58), (290, 59), (290, 60), (290, 61), (290, 62), (290, 63), (290, 64), (290, 65), (290, 69), (291, 35), (291, 37),
(291, 38), (291, 39), (291, 40), (291, 41), (291, 42), (291, 43), (291, 44), (291, 45), (291, 46), (291, 47), (291, 48), (291, 49), (291, 50), (291, 51), (291, 52), (291, 53), (291, 54), (291, 55), (291, 56), (291, 57), (291, 58), (291, 59), (291, 60), (291, 61), (291, 62), (291, 63), (291, 64), (291, 68), (292, 34), (292, 36), (292, 37), (292, 38), (292, 39), (292, 40), (292, 41), (292, 42), (292, 43), (292, 44), (292, 45), (292, 46), (292, 47), (292, 48), (292, 49), (292, 50), (292, 51), (292, 52), (292, 53), (292, 54), (292, 55), (292, 56), (292, 57), (292, 58), (292, 59), (292, 60), (292, 61), (292, 62), (292, 63), (292, 65), (293, 33), (293, 35), (293, 36), (293, 37), (293, 38), (293, 39), (293, 40), (293, 41), (293, 42), (293, 43), (293, 44), (293, 45), (293, 46), (293, 47),
(293, 48), (293, 49), (293, 50), (293, 51), (293, 52), (293, 53), (293, 54), (293, 55), (293, 56), (293, 57), (293, 58), (293, 59), (293, 60), (293, 61), (293, 62), (293, 64), (294, 33), (294, 35), (294, 36), (294, 37), (294, 38), (294, 39), (294, 40), (294, 41), (294, 42), (294, 43), (294, 44), (294, 45), (294, 46), (294, 47), (294, 48), (294, 49), (294, 50), (294, 51), (294, 52), (294, 53), (294, 54), (294, 55), (294, 56), (294, 57), (294, 58), (294, 59), (294, 60), (294, 61), (294, 63), (295, 32), (295, 34), (295, 35), (295, 36), (295, 37), (295, 38), (295, 39), (295, 40), (295, 41), (295, 42), (295, 43), (295, 44), (295, 45), (295, 46), (295, 47), (295, 48), (295, 49), (295, 50), (295, 51), (295, 52), (295, 53), (295, 54), (295, 55), (295, 56), (295, 57), (295, 58), (295, 59),
(295, 60), (295, 62), (296, 32), (296, 33), (296, 34), (296, 35), (296, 36), (296, 37), (296, 38), (296, 39), (296, 40), (296, 41), (296, 42), (296, 43), (296, 44), (296, 45), (296, 46), (296, 47), (296, 48), (296, 49), (296, 50), (296, 51), (296, 52), (296, 53), (296, 54), (296, 55), (296, 56), (296, 57), (296, 58), (296, 59), (296, 60), (296, 62), (297, 33), (297, 35), (297, 36), (297, 37), (297, 38), (297, 39), (297, 40), (297, 41), (297, 42), (297, 43), (297, 44), (297, 45), (297, 46), (297, 47), (297, 48), (297, 49), (297, 50), (297, 51), (297, 52), (297, 53), (297, 54), (297, 55), (297, 56), (297, 57), (297, 58), (297, 59), (297, 61), (298, 33), (298, 35), (298, 36), (298, 37), (298, 38), (298, 39), (298, 40), (298, 41), (298, 42), (298, 43), (298, 44), (298, 45), (298, 46),
(298, 47), (298, 48), (298, 49), (298, 50), (298, 51), (298, 52), (298, 53), (298, 54), (298, 55), (298, 56), (298, 57), (298, 58), (298, 59), (298, 61), (299, 33), (299, 35), (299, 36), (299, 37), (299, 38), (299, 39), (299, 40), (299, 41), (299, 42), (299, 43), (299, 44), (299, 45), (299, 46), (299, 47), (299, 48), (299, 49), (299, 50), (299, 51), (299, 52), (299, 53), (299, 54), (299, 55), (299, 56), (299, 57), (299, 58), (299, 60), (300, 33), (300, 35), (300, 36), (300, 37), (300, 38), (300, 39), (300, 40), (300, 41), (300, 42), (300, 43), (300, 44), (300, 45), (300, 46), (300, 47), (300, 48), (300, 49), (300, 50), (300, 51), (300, 52), (300, 53), (300, 54), (300, 55), (300, 56), (300, 57), (300, 59), (301, 34), (301, 36), (301, 37), (301, 38), (301, 39), (301, 40), (301, 41),
(301, 42), (301, 43), (301, 44), (301, 45), (301, 46), (301, 47), (301, 48), (301, 49), (301, 50), (301, 51), (301, 52), (301, 53), (301, 54), (301, 55), (301, 56), (301, 58), (302, 34), (302, 36), (302, 37), (302, 38), (302, 39), (302, 40), (302, 41), (302, 42), (302, 43), (302, 44), (302, 45), (302, 46), (302, 47), (302, 48), (302, 49), (302, 50), (302, 51), (302, 52), (302, 53), (302, 54), (302, 55), (302, 57), (303, 34), (303, 36), (303, 37), (303, 38), (303, 39), (303, 40), (303, 41), (303, 42), (303, 43), (303, 44), (303, 45), (303, 46), (303, 47), (303, 48), (303, 49), (303, 50), (303, 51), (303, 52), (303, 53), (303, 54), (303, 55), (303, 57), (304, 35), (304, 37), (304, 38), (304, 39), (304, 40), (304, 41), (304, 42), (304, 43), (304, 44), (304, 45), (304, 46), (304, 47),
(304, 48), (304, 49), (304, 50), (304, 51), (304, 52), (304, 53), (304, 54), (304, 56), (305, 35), (305, 37), (305, 38), (305, 39), (305, 40), (305, 41), (305, 42), (305, 43), (305, 44), (305, 45), (305, 46), (305, 47), (305, 48), (305, 49), (305, 50), (305, 51), (305, 52), (305, 53), (305, 54), (305, 56), (306, 36), (306, 38), (306, 39), (306, 40), (306, 41), (306, 42), (306, 43), (306, 44), (306, 45), (306, 46), (306, 47), (306, 48), (306, 49), (306, 50), (306, 51), (306, 52), (306, 53), (306, 54), (307, 36), (307, 38), (307, 39), (307, 40), (307, 41), (307, 42), (307, 43), (307, 44), (307, 45), (307, 46), (307, 47), (307, 48), (307, 49), (307, 50), (307, 51), (307, 52), (307, 53), (307, 55), (308, 36), (308, 38), (308, 39), (308, 40), (308, 41), (308, 42), (308, 43), (308, 44),
(308, 45), (308, 46), (308, 47), (308, 48), (308, 49), (308, 50), (308, 51), (308, 52), (309, 37), (309, 39), (309, 40), (309, 41), (309, 42), (309, 43), (309, 44), (309, 45), (309, 46), (309, 47), (309, 48), (309, 49), (309, 50), (309, 51), (310, 37), (310, 39), (310, 40), (310, 41), (310, 42), (310, 43), (310, 44), (310, 45), (310, 46), (310, 47), (310, 48), (310, 49), (310, 50), (310, 52), (311, 38), (311, 40), (311, 41), (311, 42), (311, 43), (311, 44), (311, 45), (311, 46), (311, 47), (311, 48), (311, 49), (311, 51), (312, 40), (312, 41), (312, 42), (312, 43), (312, 44), (312, 45), (312, 46), (312, 47), (312, 48), (312, 50), (313, 39), (313, 41), (313, 42), (313, 43), (313, 44), (313, 45), (313, 46), (313, 47), (313, 49), (314, 40), (314, 42), (314, 43), (314, 44), (314, 45),
(314, 46), (314, 48), (315, 40), (315, 42), (315, 43), (315, 44), (315, 45), (315, 47), (316, 41), (316, 43), (316, 44), (316, 46), (317, 42), (317, 45), (318, 43), (318, 44), )
| 773.870564
| 865
| 0.483406
|
95fe6a08f20abbeb1fd3af69ebd4ff2ec8e7ff6d
| 2,444
|
py
|
Python
|
compare_df/MatchingRecords.py
|
Sanyam15/compare_df
|
8ab1a51196d22dcd597e9b31066cb4046de2c8ac
|
[
"MIT"
] | null | null | null |
compare_df/MatchingRecords.py
|
Sanyam15/compare_df
|
8ab1a51196d22dcd597e9b31066cb4046de2c8ac
|
[
"MIT"
] | null | null | null |
compare_df/MatchingRecords.py
|
Sanyam15/compare_df
|
8ab1a51196d22dcd597e9b31066cb4046de2c8ac
|
[
"MIT"
] | null | null | null |
'''
This module exposes getMatchingRecords, which returns a dataframe containing
the records that are identical, for the passed list of columns (default: all
columns), across the two input dataframes.
'''
import pandas as pd
def checkError(dataframe1, dataframe2, common_columns):
# Error : raised when there are no columns to compare
if len(common_columns) == 0:
raise ValueError(
'Data Error : The parameter -> "common_columns" is empty or dataframe has no columns'
)
# Error : If the param -> 'common_columns' has column/s which do not exist
res1 = [ele for ele in common_columns if ele not in list(dataframe1.columns)]
res2 = [ele for ele in common_columns if ele not in list(dataframe2.columns)]
if len(res1) > 0 and len(res2) > 0:
raise KeyError(
"Data Error : Could not find columns: "
+ str(res1)
+ " in dataframe1 and columns:"
+ str(res2)
+ " in dataframe2"
)
elif len(res1) > 0:
raise KeyError(
"Data Error : Could not find columns: " + str(res1) + " in dataframe1"
)
elif len(res2) > 0:
raise KeyError(
"Data Error : Could not find columns: " + str(res2) + " in dataframe2"
)
def getMatchingRecords(
dataframe1=None,
dataframe2=None,
common_columns=None,
):
"""
:rtype: Pandas DataFrame
:param dataframe1: The first input DataFrame
:param dataframe2: The second input DataFrame
:param common_columns: The list of columns on which the two dataframes have to be compared. Default: all columns of dataframe1
"""
# Error : If either of dataframe is not pandas dataframe
if not isinstance(dataframe1, pd.DataFrame):
raise TypeError('Expects pd.DataFrame for the parameter -> "dataframe1"')
if not isinstance(dataframe2, pd.DataFrame):
raise TypeError('Expects pd.DataFrame for the parameter -> "dataframe2"')
#Setting default argument for common_columns
if common_columns is None:
common_columns = list(dataframe1.columns)
#Check For Errors
checkError(dataframe1, dataframe2, common_columns)
#Selecting the required columns
dataframe1 = dataframe1[common_columns]
dataframe2 = dataframe2[common_columns]
return pd.merge(dataframe1, dataframe2, on=list(dataframe1.columns))
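# --- Illustrative usage sketch; not part of the original module ---
# A minimal, hedged example of calling getMatchingRecords. The sample frames
# and column names below are assumptions made purely for demonstration.
if __name__ == "__main__":
    left = pd.DataFrame({"id": [1, 2, 3], "city": ["NY", "LA", "SF"]})
    right = pd.DataFrame({"id": [2, 3, 4], "city": ["LA", "SF", "TX"]})
    # Comparing on "id" only: the rows with ids 2 and 3 are reported as matches.
    print(getMatchingRecords(left, right, common_columns=["id"]))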
| 37.030303
| 140
| 0.63216
|
a6f604bf9c0832b1019c1dcd8108bd6b37c9609f
| 2,637
|
py
|
Python
|
backend/dating/models.py
|
crowdbotics-apps/me-challenge-28814
|
894d4b4384167ca8e22ea7530185dcd736a64f29
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/dating/models.py
|
crowdbotics-apps/me-challenge-28814
|
894d4b4384167ca8e22ea7530185dcd736a64f29
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/dating/models.py
|
crowdbotics-apps/me-challenge-28814
|
894d4b4384167ca8e22ea7530185dcd736a64f29
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
from django.conf import settings
from django.db import models
class Setting(models.Model):
"Generated Model"
maximum_distance = models.IntegerField()
gender = models.CharField(
max_length=256,
)
age_range = models.IntegerField()
show_me_on_searches = models.BooleanField()
new_matches_notification = models.BooleanField()
message_notification = models.BooleanField()
message_likes_notification = models.BooleanField()
super_like_notification = models.BooleanField()
in_app_vibrations = models.BooleanField()
user = models.ForeignKey(
"users.User",
null=True,
blank=True,
on_delete=models.CASCADE,
related_name="setting_user",
)
class Dislike(models.Model):
"Generated Model"
owner = models.ForeignKey(
"users.User",
on_delete=models.CASCADE,
related_name="dislike_owner",
)
user = models.ForeignKey(
"users.User",
on_delete=models.CASCADE,
related_name="dislike_user",
)
class Profile(models.Model):
"Generated Model"
bio = models.TextField()
school = models.TextField()
date_of_birth = models.DateField()
created = models.DateField(
auto_now_add=True,
)
modified = models.DateField(
auto_now=True,
)
user = models.OneToOneField(
"users.User",
null=True,
blank=True,
on_delete=models.CASCADE,
related_name="profile_user",
)
class Inbox(models.Model):
"Generated Model"
slug = models.SlugField(
max_length=50,
)
created = models.DateTimeField(
auto_now_add=True,
)
class UserPhoto(models.Model):
"Generated Model"
user = models.ForeignKey(
"users.User",
on_delete=models.CASCADE,
related_name="userphoto_user",
)
photo = models.URLField()
class Match(models.Model):
"Generated Model"
user = models.ForeignKey(
"users.User",
on_delete=models.CASCADE,
related_name="match_user",
)
owner = models.ForeignKey(
"users.User",
on_delete=models.CASCADE,
related_name="match_owner",
)
created = models.DateTimeField(
auto_now_add=True,
)
class Like(models.Model):
"Generated Model"
owner = models.ForeignKey(
"users.User",
on_delete=models.CASCADE,
related_name="like_owner",
)
user = models.ForeignKey(
"users.User",
on_delete=models.CASCADE,
related_name="like_user",
)
super_liked = models.BooleanField()
# Create your models here.
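# --- Illustrative ORM sketch; not part of the generated file ---
# A hedged example of how the generated Like and Match models might be used to
# record a mutual match. The helper name and calling convention are assumptions
# for illustration only; running it needs a configured Django project with
# these models migrated.
def create_match_if_mutual(owner, user):
    """Create a Match when both users have liked each other."""
    liked_back = Like.objects.filter(owner=user, user=owner).exists()
    already_matched = Match.objects.filter(owner=owner, user=user).exists()
    if liked_back and not already_matched:
        return Match.objects.create(owner=owner, user=user)
    return None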
| 23.131579
| 54
| 0.633675
|
d0891b50b3d9c1aa1dd7e2a38b92898e11636fb5
| 4,796
|
py
|
Python
|
data/modelnet/12_1_visualize_gt.py
|
davidstutz/aml-improved-shape-completion
|
9337a0421994199fa218d564cc34a7e7af1a275f
|
[
"Unlicense"
] | 9
|
2018-10-11T08:03:59.000Z
|
2021-11-17T11:16:07.000Z
|
data/modelnet/12_1_visualize_gt.py
|
jtpils/aml-improved-shape-completion
|
9337a0421994199fa218d564cc34a7e7af1a275f
|
[
"Unlicense"
] | 1
|
2019-07-08T16:43:53.000Z
|
2019-11-25T17:08:07.000Z
|
data/modelnet/12_1_visualize_gt.py
|
jtpils/aml-improved-shape-completion
|
9337a0421994199fa218d564cc34a7e7af1a275f
|
[
"Unlicense"
] | 3
|
2018-07-19T13:06:16.000Z
|
2020-12-09T00:59:56.000Z
|
import argparse
import sys
import os
# Make the bundled lib/ helpers importable relative to this script.
sys.path.insert(1, os.path.realpath(__file__ + '/../lib/'))
from blender_utils import *
import common
import json
import re
def read_json(file):
"""
Read a JSON file.
:param file: path to file to read
:type file: str
:return: parsed JSON as dict
:rtype: dict
"""
assert os.path.exists(file), 'file %s not found' % file
with open(file, 'r') as fp:
return json.load(fp)
def read_ordered_directory(dir, extension = None):
"""
Gets a list of file names ordered by integers (if integers are found
in the file names).
:param dir: path to directory
:type dir: str
:param extension: extension to filter for
:type extension: str
:return: list of file names
:rtype: [str]
"""
# http://stackoverflow.com/questions/4623446/how-do-you-sort-files-numerically
def get_int(value):
"""
Convert the input value to integer if possible.
:param value: mixed input value
:type value: mixed
:return: value as integer, or value
:rtype: mixed
"""
try:
return int(value)
except:
return value
def alphanum_key(string):
"""
Turn a string into a list of string and number chunks,
e.g. "z23a" -> ["z", 23, "a"].
:param string: input string
:type string: str
:return: list of elements
:rtype: [int|str]
"""
return [get_int(part) for part in re.split('([0-9]+)', string)]
def sort_filenames(filenames):
"""
Sort the given list by integers if integers are found in the element strings.
:param filenames: file names to sort
:type filenames: [str]
"""
filenames.sort(key = alphanum_key)
assert os.path.exists(dir), 'directory %s not found' % dir
filenames = [dir + '/' + filename for filename in os.listdir(dir)]
if extension is not None:
filenames = [filename for filename in filenames if filename[-len(extension):] == extension]
sort_filenames(filenames)
return filenames
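# --- Illustrative note; not from the original script ---
# Example of the ordering this helper produces, assuming a directory with
# hypothetical files "1.off", "2.off" and "10.off": plain lexicographic sorting
# would yield ['1.off', '10.off', '2.off'], whereas the alphanum_key-based sort
# above returns ['1.off', '2.off', '10.off'].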
if __name__ == '__main__':
try:
argv = sys.argv[sys.argv.index("--") + 1:]
except ValueError:
log('[Error] "--" not found, call as follows:', LogLevel.ERROR)
log('[Error] $BLENDER --background --python 12_1_visualize_gt.py -- 1>/dev/null config_folder', LogLevel.ERROR)
exit()
if len(argv) < 1:
log('[Error] not enough parameters, call as follows:', LogLevel.ERROR)
log('[Error] $BLENDER --background --python 12_1_visualize_gt.py -- 1>/dev/null config_folder', LogLevel.ERROR)
exit()
config_folder = argv[0] + '/'
assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder
config_files = ['test.json']
for config_file in config_files:
config = read_json(config_folder + config_file)
height = config['height']
width = config['width']
depth = config['depth']
scale = 1./max(height, depth, width)
multiplier = config['multiplier']
n_observations = config['n_observations']
off_directory = common.dirname(config, 'off_gt_dir')
txt_directory = common.dirname(config, 'txt_gt_dir')
import itertools
off_files = read_ordered_directory(off_directory)
n_files = len(off_files)
off_files = list(itertools.chain.from_iterable(itertools.repeat(x, n_observations) for x in off_files))
txt_files = []
for n in range(n_files):
for k in range(n_observations):
txt_files.append(txt_directory + '/%d/%d.txt' % (k, n))
vis_directory = common.dirname(config, 'vis_dir')
if not os.path.isdir(vis_directory):
os.makedirs(vis_directory)
N = 30
log('[Data] %d samples' % len(off_files))
for i in range(N):
n = i * (len(off_files) // N)
off_file = off_files[n]
txt_file = txt_files[n]
camera_target = initialize()
off_material = make_material('BRC_Material_Mesh', (0.66, 0.45, 0.23), 0.8, True)
txt_material = make_material('BRC_Material_Point_Cloud', (0.65, 0.23, 0.25), 1, True)
log('[Data] reading %s' % off_file)
load_off(off_file, off_material, (-width*scale/2, -depth*scale/2, -height*scale/2), scale, 'xzy')
load_txt(txt_file, 0.0075, txt_material, (-width*scale/2, -depth*scale/2, -height*scale/2), scale, 'xzy')
rotation = (5, 0, 125)
distance = 0.5
png_file = vis_directory + '/%d_gt.png' % n
render(camera_target, png_file, rotation, distance)
log('[Data] wrote %s' % png_file)
| 31.142857
| 119
| 0.603003
|
47993c99aa76ce12b9540c5fd58dd3af2e168b25
| 319
|
py
|
Python
|
dynamicprofiles/admin.py
|
jgroszko/django-dynamic-profiles
|
01cbeec4677052a24ce2acc1d61e319d2f99c052
|
[
"BSD-3-Clause"
] | null | null | null |
dynamicprofiles/admin.py
|
jgroszko/django-dynamic-profiles
|
01cbeec4677052a24ce2acc1d61e319d2f99c052
|
[
"BSD-3-Clause"
] | null | null | null |
dynamicprofiles/admin.py
|
jgroszko/django-dynamic-profiles
|
01cbeec4677052a24ce2acc1d61e319d2f99c052
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from dynamicprofiles.models import Profile, ProfileDataGroup, ProfileDataField, ProfileData, ProfileTextData
admin.site.register(Profile)
admin.site.register(ProfileDataGroup)
admin.site.register(ProfileDataField)
admin.site.register(ProfileData)
admin.site.register(ProfileTextData)
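# --- Illustrative sketch; not part of the original file ---
# If customised list columns were wanted, a registration could instead pass a
# ModelAdmin subclass; the class below is an assumption for illustration only
# (kept commented out because the model is already registered above).
#
#     class ProfileDataFieldAdmin(admin.ModelAdmin):
#         list_display = ("__str__",)
#
#     admin.site.register(ProfileDataField, ProfileDataFieldAdmin)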
| 31.9
| 108
| 0.858934
|
82b0f10fa28f73a1d96c25097653c5847df68298
| 5,013
|
py
|
Python
|
utils/p4runtime_ap.py
|
ramonfontes/tutorials
|
3335911512c14d90a1dddf425c3bc697c73e9581
|
[
"Apache-2.0"
] | 3
|
2019-12-17T13:10:19.000Z
|
2021-03-19T14:16:34.000Z
|
utils/p4runtime_ap.py
|
ramonfontes/tutorials
|
3335911512c14d90a1dddf425c3bc697c73e9581
|
[
"Apache-2.0"
] | null | null | null |
utils/p4runtime_ap.py
|
ramonfontes/tutorials
|
3335911512c14d90a1dddf425c3bc697c73e9581
|
[
"Apache-2.0"
] | 4
|
2019-08-23T03:48:46.000Z
|
2020-07-14T18:26:53.000Z
|
# Copyright 2017-present Barefoot Networks, Inc.
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os, tempfile
from time import sleep
from mn_wifi.node import AP
from mininet.moduledeps import pathCheck
from mininet.log import info, error, debug
from p4_mininet import P4AP, SWITCH_START_TIMEOUT
from netstat import check_listening_on_port
class P4RuntimeAP(P4AP):
"BMv2 ap with gRPC support"
next_grpc_port = 50051
next_thrift_port = 9090
def __init__(self, name, sw_path = None, json_path = None,
grpc_port = None,
thrift_port = None,
pcap_dump = False,
log_console = False,
verbose = False,
device_id = None,
enable_debugger = False,
log_file = None,
**kwargs):
AP.__init__(self, name, **kwargs)
assert (sw_path)
self.sw_path = sw_path
# make sure that the provided sw_path is valid
pathCheck(sw_path)
if json_path is not None:
# make sure that the provided JSON file exists
if not os.path.isfile(json_path):
error("Invalid JSON file.\n")
exit(1)
self.json_path = json_path
else:
self.json_path = None
if grpc_port is not None:
self.grpc_port = grpc_port
else:
self.grpc_port = P4RuntimeAP.next_grpc_port
P4RuntimeAP.next_grpc_port += 1
if thrift_port is not None:
self.thrift_port = thrift_port
else:
self.thrift_port = P4RuntimeAP.next_thrift_port
P4RuntimeAP.next_thrift_port += 1
if check_listening_on_port(self.grpc_port):
error('%s cannot bind port %d because it is bound by another process\n' % (self.name, self.grpc_port))
exit(1)
self.verbose = verbose
logfile = "/tmp/p4s.{}.log".format(self.name)
self.output = open(logfile, 'w')
self.pcap_dump = pcap_dump
self.enable_debugger = enable_debugger
self.log_console = log_console
if log_file is not None:
self.log_file = log_file
else:
self.log_file = "/tmp/p4s.{}.log".format(self.name)
if device_id is not None:
self.device_id = device_id
P4AP.device_id = max(P4AP.device_id, device_id)
else:
self.device_id = P4AP.device_id
P4AP.device_id += 1
self.nanomsg = "ipc:///tmp/bm-{}-log.ipc".format(self.device_id)
def check_ap_started(self, pid):
for _ in range(SWITCH_START_TIMEOUT * 2):
if not os.path.exists(os.path.join("/proc", str(pid))):
return False
            if check_listening_on_port(int(self.grpc_port)):
                return True
            sleep(0.5)
        return False  # timed out waiting for the gRPC port to come up
def start(self, controllers):
info("Starting P4 ap {}.\n".format(self.name))
args = [self.sw_path]
for port, intf in self.intfs.items():
if not intf.IP():
args.extend(['-i', str(port) + "@" + intf.name])
#if self.pcap_dump:
# args.append("--pcap %s" % self.pcap_dump)
if self.nanomsg:
args.extend(['--nanolog', self.nanomsg])
args.extend(['--device-id', str(self.device_id)])
P4AP.device_id += 1
if self.json_path:
args.append(self.json_path)
else:
args.append("--no-p4")
if self.enable_debugger:
args.append("--debugger")
if self.log_console:
args.append("--log-console")
if self.thrift_port:
args.append('--thrift-port ' + str(self.thrift_port))
if self.grpc_port:
args.append("-- --grpc-server-addr 0.0.0.0:" + str(self.grpc_port))
cmd = ' '.join(args)
info(cmd + "\n")
pid = None
with tempfile.NamedTemporaryFile() as f:
self.cmd(cmd + ' >' + self.log_file + ' 2>&1 & echo $! >> ' + f.name)
pid = int(f.read())
debug("P4 ap {} PID is {}.\n".format(self.name, pid))
if not self.check_ap_started(pid):
error("P4 ap {} did not start correctly.\n".format(self.name))
exit(1)
info("P4 ap {} has been started.\n".format(self.name))
| 37.133333 | 115 | 0.575903 |
f927c455fad64cdc93e6448db360a65aa5d1e102 | 5,425 | py | Python
| feature_extraction.py | MenglingHettinger/CarND-Behavioral-Cloning-P3 | e291721fad75cf35e0fa8396665b53bc170a4452 | ["MIT"] | null | null | null
| feature_extraction.py | MenglingHettinger/CarND-Behavioral-Cloning-P3 | e291721fad75cf35e0fa8396665b53bc170a4452 | ["MIT"] | null | null | null
| feature_extraction.py | MenglingHettinger/CarND-Behavioral-Cloning-P3 | e291721fad75cf35e0fa8396665b53bc170a4452 | ["MIT"] | null | null | null |
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import glob
import pickle
import cv2
from skimage.feature import hog  # needed by get_hog_features() below
from sklearn.svm import SVC
def convert_color(img, conv='RGB2YCrCb'):
if conv == 'RGB2YCrCb':
return cv2.cvtColor(img, cv2.COLOR_RGB2YCrCb)
if conv == 'BGR2YCrCb':
return cv2.cvtColor(img, cv2.COLOR_BGR2YCrCb)
if conv == 'RGB2LUV':
return cv2.cvtColor(img, cv2.COLOR_RGB2LUV)
def bin_spatial(img, size=(32, 32)):
color1 = cv2.resize(img[:,:,0], size).ravel()
color2 = cv2.resize(img[:,:,1], size).ravel()
color3 = cv2.resize(img[:,:,2], size).ravel()
return np.hstack((color1, color2, color3))
def color_hist(img, nbins=32): #bins_range=(0, 256)
# Compute the histogram of the color channels separately
channel1_hist = np.histogram(img[:,:,0], bins=nbins)
channel2_hist = np.histogram(img[:,:,1], bins=nbins)
channel3_hist = np.histogram(img[:,:,2], bins=nbins)
# Concatenate the histograms into a single feature vector
hist_features = np.concatenate((channel1_hist[0], channel2_hist[0], channel3_hist[0]))
    # Return the concatenated color histogram feature vector
    return hist_features
def get_hog_features(img, orient, pix_per_cell, cell_per_block,
vis=False, feature_vec=True):
# Call with two outputs if vis==True
if vis == True:
features, hog_image = hog(img, orientations=orient,
pixels_per_cell=(pix_per_cell, pix_per_cell),
cells_per_block=(cell_per_block, cell_per_block),
block_norm= 'L2-Hys',
transform_sqrt=False,
visualise=vis, feature_vector=feature_vec)
return features, hog_image
# Otherwise call with one output
else:
features = hog(img, orientations=orient,
pixels_per_cell=(pix_per_cell, pix_per_cell),
cells_per_block=(cell_per_block, cell_per_block),
block_norm= 'L2-Hys',
transform_sqrt=False,
visualise=vis, feature_vector=feature_vec)
return features
def find_cars(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell, cell_per_block, spatial_size, hist_bins):
draw_img = np.copy(img)
img = img.astype(np.float32)/255
img_tosearch = img[ystart:ystop,:,:]
ctrans_tosearch = convert_color(img_tosearch, conv='RGB2YCrCb')
if scale != 1:
imshape = ctrans_tosearch.shape
ctrans_tosearch = cv2.resize(ctrans_tosearch, (np.int(imshape[1]/scale), np.int(imshape[0]/scale)))
ch1 = ctrans_tosearch[:,:,0]
ch2 = ctrans_tosearch[:,:,1]
ch3 = ctrans_tosearch[:,:,2]
# Define blocks and steps as above
nxblocks = (ch1.shape[1] // pix_per_cell) - cell_per_block + 1
nyblocks = (ch1.shape[0] // pix_per_cell) - cell_per_block + 1
nfeat_per_block = orient*cell_per_block**2
    # 64 was the original sampling rate, with 8 cells and 8 pix per cell
    window = 64
nblocks_per_window = (window // pix_per_cell) - cell_per_block + 1
cells_per_step = 2 # Instead of overlap, define how many cells to step
nxsteps = (nxblocks - nblocks_per_window) // cells_per_step + 1
nysteps = (nyblocks - nblocks_per_window) // cells_per_step + 1
# Compute individual channel HOG features for the entire image
hog1 = get_hog_features(ch1, orient, pix_per_cell, cell_per_block, feature_vec=False)
hog2 = get_hog_features(ch2, orient, pix_per_cell, cell_per_block, feature_vec=False)
hog3 = get_hog_features(ch3, orient, pix_per_cell, cell_per_block, feature_vec=False)
for xb in range(nxsteps):
for yb in range(nysteps):
ypos = yb*cells_per_step
xpos = xb*cells_per_step
# Extract HOG for this patch
hog_feat1 = hog1[ypos:ypos+nblocks_per_window, xpos:xpos+nblocks_per_window].ravel()
hog_feat2 = hog2[ypos:ypos+nblocks_per_window, xpos:xpos+nblocks_per_window].ravel()
hog_feat3 = hog3[ypos:ypos+nblocks_per_window, xpos:xpos+nblocks_per_window].ravel()
hog_features = np.hstack((hog_feat1, hog_feat2, hog_feat3))
xleft = xpos*pix_per_cell
ytop = ypos*pix_per_cell
# Extract the image patch
subimg = cv2.resize(ctrans_tosearch[ytop:ytop+window, xleft:xleft+window], (64,64))
# Get color features
spatial_features = bin_spatial(subimg, size=spatial_size)
hist_features = color_hist(subimg, nbins=hist_bins)
# Scale features and make a prediction
test_features = X_scaler.transform(np.hstack((spatial_features, hist_features, hog_features)).reshape(1, -1))
#test_features = X_scaler.transform(np.hstack((shape_feat, hist_feat)).reshape(1, -1))
test_prediction = svc.predict(test_features)
if test_prediction == 1:
xbox_left = np.int(xleft*scale)
ytop_draw = np.int(ytop*scale)
win_draw = np.int(window*scale)
cv2.rectangle(draw_img,(xbox_left, ytop_draw+ystart),(xbox_left+win_draw,ytop_draw+win_draw+ystart),(0,0,255),6)
return draw_img
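
# --- Illustrative usage sketch, not part of the original module ---
# A hedged example of running find_cars() on a single frame. The pickle file,
# test image path and parameter values are assumptions chosen only to show the
# expected call signature; the project stores its trained classifier elsewhere.
if __name__ == '__main__':
    with open('svc_pickle.p', 'rb') as f:            # assumed pickle holding the trained model
        model = pickle.load(f)
    svc, X_scaler = model['svc'], model['scaler']    # assumed dictionary keys
    img = mpimg.imread('test_images/test1.jpg')      # assumed test frame
    out_img = find_cars(img, ystart=400, ystop=656, scale=1.5,
                        svc=svc, X_scaler=X_scaler,
                        orient=9, pix_per_cell=8, cell_per_block=2,
                        spatial_size=(32, 32), hist_bins=32)
    plt.imshow(out_img)
    plt.show()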
| 45.588235 | 129 | 0.638157 |
f84aa2603f64ebe57ed19960a9b59f7678eaeddb | 1,082 | py | Python
| first_steps_with_tensor_flow.py | chenmich/google-ml-crash-course-exercises | d610f890d53b1537a3ce80531ce1ff2df1f5dc84 | ["MIT"] | null | null | null
| first_steps_with_tensor_flow.py | chenmich/google-ml-crash-course-exercises | d610f890d53b1537a3ce80531ce1ff2df1f5dc84 | ["MIT"] | null | null | null
| first_steps_with_tensor_flow.py | chenmich/google-ml-crash-course-exercises | d610f890d53b1537a3ce80531ce1ff2df1f5dc84 | ["MIT"] | null | null | null |
#setup
import math
from matplotlib import cm
from matplotlib import gridspec
from matplotlib import pyplot as plt
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow.python.data import Dataset
tf.logging.set_verbosity(tf.logging.ERROR)
pd.set_option('display.max_rows', 10)
pd.set_option('display.float_format', '{: .1f}'.format)
#load data
california_housing_dataframe = pd.read_csv(
"https://storage.googleapis.com/mledu-datasets/california_housing_train.csv", sep=",")
california_housing_dataframe = california_housing_dataframe.reindex(
np.random.permutation(california_housing_dataframe.index)
)
california_housing_dataframe['median_house_value'] /= 1000.0
california_housing_dataframe
#examine the data
california_housing_dataframe.describe()
#build the first model
#step 1: define features and configure feature columns
#define the input feature: total_rooms
my_feature = california_housing_dataframe[["total_rooms"]]
#configure a numeric feature column for total_rooms
feature_columns = [tf.feature_column.numeric_column('total_rooms')]
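
# --- Illustrative continuation sketch, not part of the original exercise file ---
# A hedged sketch of the next step the crash-course exercise normally takes:
# picking the label column and building a LinearRegressor. The learning rate
# and clipping norm are assumptions for illustration only (TF 1.x estimator
# API, matching the imports above).
targets = california_housing_dataframe["median_house_value"]
my_optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.0000001)
my_optimizer = tf.contrib.estimator.clip_gradients_by_norm(my_optimizer, 5.0)
linear_regressor = tf.estimator.LinearRegressor(
    feature_columns=feature_columns,
    optimizer=my_optimizer
)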
| 30.914286 | 90 | 0.820702 |
b1f445c3af5c767ef2835f57e660b61901abbf8a | 2,626 | py | Python
| multivariate_time_series_rnn/datasets/custom_mono_split.py | xdr940/huawei2021 | 4d51f4b15bf152a6a7c75c9724d9414df3b9b636 | ["MIT"] | 1 | 2021-12-10T10:14:55.000Z | 2021-12-10T10:14:55.000Z
| multivariate_time_series_rnn/datasets/custom_mono_split.py | xdr940/huawei2021 | 4d51f4b15bf152a6a7c75c9724d9414df3b9b636 | ["MIT"] | null | null | null
| multivariate_time_series_rnn/datasets/custom_mono_split.py | xdr940/huawei2021 | 4d51f4b15bf152a6a7c75c9724d9414df3b9b636 | ["MIT"] | null | null | null |
from path import Path
import argparse
import pandas as pd
import random
import numpy as np
def writelines(items, path):
    # write one item per line, without a trailing newline after the last item
    length = len(items)
    with open(path, 'w') as f:
        for i in range(length):
            if i == length - 1:
                f.write(str(items[i]))
            else:
                f.write(str(items[i]) + '\n')
def readlines(filename):
"""Read all the lines in a text file and return as a list
"""
with open(filename, 'r') as f:
lines = f.read().splitlines()
return lines
def parse_args():
parser = argparse.ArgumentParser(
description='custom dataset split for training ,validation and test')
parser.add_argument('--dataset_path', type=str,default='/home/roit/datasets/huawei2021/pb-data2/post/A_h_CW_detection_post.csv',help='csv file')
parser.add_argument("--num",
# default=1000,
default=None
)
parser.add_argument('--seq_len',default=5)
parser.add_argument("--proportion",default=[0.8,0.1,0.1],help="train, val, test")
parser.add_argument("--rand_seed",default=12346)
parser.add_argument("--out_dir",default='../splits/CTMNT')
return parser.parse_args()
def main(args):
'''
:param args:
    :return: None; writes a directory containing train/val/test .txt split files
'''
[train_,val_,test_] = args.proportion
out_num = args.num
    if abs(train_ + val_ + test_ - 1.) > 0.01:  # proportions must sum to 1 (within a small tolerance)
        print('error: train/val/test proportions must sum to 1')
        return
seq_len = args.seq_len
out_dir = Path(args.out_dir)
out_dir.mkdir_p()
train_txt_p = out_dir/'train.txt'
val_txt_p = out_dir/'val.txt'
test_txt_p = out_dir/'test.txt'
dataset_path = Path(args.dataset_path)
df = pd.read_csv(dataset_path)
dataset_length = len(df)
item_list = list(np.array(np.linspace(seq_len,dataset_length-1,dataset_length-seq_len),dtype=np.int))
random.seed(args.rand_seed)
# random.shuffle(item_list)
length = len(item_list)
train_bound = int(length * args.proportion[0])
val_bound = int(length * args.proportion[1]) + train_bound
test_bound = int(length * args.proportion[2]) + val_bound
print(" train items:{}\n val items:{}\n test items:{}".format(len(item_list[:train_bound]), len(item_list[train_bound:val_bound]), len(item_list[val_bound:test_bound])))
writelines(item_list[:train_bound],train_txt_p)
writelines(item_list[train_bound:val_bound],val_txt_p)
writelines(item_list[val_bound:test_bound],test_txt_p)
if __name__ == '__main__':
options = parse_args()
main(options)
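
# --- Illustrative usage sketch, not part of the original script ---
# A hedged example of driving main() programmatically instead of through the
# CLI; the csv path and output directory are assumptions for illustration.
#
#   from argparse import Namespace
#   opts = Namespace(dataset_path='data/example_detection_post.csv', num=None,
#                    seq_len=5, proportion=[0.8, 0.1, 0.1], rand_seed=12346,
#                    out_dir='../splits/EXAMPLE')
#   main(opts)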
| 24.542056 | 173 | 0.646992 |